Dec 03 16:18:22 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 03 16:18:22 crc restorecon[4692]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]:
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc 
restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc 
restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 
crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 
16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 
16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 03 16:18:22 crc 
restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 03 16:18:22 crc restorecon[4692]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 
16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc 
restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 03 16:18:22 crc restorecon[4692]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 03 16:18:23 crc kubenswrapper[4768]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.354550 4768 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358404 4768 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358428 4768 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358435 4768 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358442 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358451 4768 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358459 4768 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358465 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358471 4768 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358477 4768 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358492 4768 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358498 4768 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358504 4768 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358509 4768 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358516 4768 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358522 4768 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358529 4768 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358536 4768 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358542 4768 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358548 4768 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358554 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358563 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358571 4768 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358581 4768 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358608 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358615 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358621 4768 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358627 4768 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358633 4768 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358639 4768 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358644 4768 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358650 4768 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358655 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358662 4768 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358669 4768 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358675 4768 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358681 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358686 4768 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358692 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358697 4768 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358703 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358708 4768 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358714 4768 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358719 4768 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358725 4768 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358730 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358737 4768 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358742 4768 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358750 4768 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358756 4768 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358762 4768 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358768 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358777 4768 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358783 4768 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358788 4768 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358794 4768 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358799 4768 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358804 4768 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358809 4768 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358814 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358819 4768 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358824 4768 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358829 4768 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358835 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358840 4768 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358845 4768 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358850 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358855 4768 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358860 4768 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358866 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358871 4768 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.358876 4768 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359202 4768 flags.go:64] FLAG: --address="0.0.0.0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359217 4768 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359226 4768 flags.go:64] FLAG: --anonymous-auth="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359234 4768 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359242 4768 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359248 4768 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359257 4768 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359265 4768 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359271 4768 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359277 4768 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359286 4768 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359294 4768 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359300 4768 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359307 4768 flags.go:64] FLAG: --cgroup-root=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359316 4768 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359324 4768 flags.go:64] FLAG: --client-ca-file=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359332 4768 flags.go:64] FLAG: --cloud-config=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359339 4768 flags.go:64] FLAG: --cloud-provider=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359347 4768 flags.go:64] FLAG: --cluster-dns="[]"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359357 4768 flags.go:64] FLAG: --cluster-domain=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359365 4768 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359374 4768 flags.go:64] FLAG: --config-dir=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359382 4768 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359390 4768 flags.go:64] FLAG: --container-log-max-files="5"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359399 4768 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359407 4768 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359413 4768 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359420 4768 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359427 4768 flags.go:64] FLAG: --contention-profiling="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359435 4768 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359444 4768 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359452 4768 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359459 4768 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359469 4768 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359478 4768 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359486 4768 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359494 4768 flags.go:64] FLAG: --enable-load-reader="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359501 4768 flags.go:64] FLAG: --enable-server="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359508 4768 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359515 4768 flags.go:64] FLAG: --event-burst="100"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359522 4768 flags.go:64] FLAG: --event-qps="50"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359529 4768 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359536 4768 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359543 4768 flags.go:64] FLAG: --eviction-hard=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359551 4768 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359556 4768 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359564 4768 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359570 4768 flags.go:64] FLAG: --eviction-soft=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359577 4768 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359583 4768 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359589 4768 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359615 4768 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359622 4768 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359628 4768 flags.go:64] FLAG: --fail-swap-on="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359634 4768 flags.go:64] FLAG: --feature-gates=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359642 4768 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359648 4768 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359655 4768 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359661 4768 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359667 4768 flags.go:64] FLAG: --healthz-port="10248"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359673 4768 flags.go:64] FLAG: --help="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359679 4768 flags.go:64] FLAG: --hostname-override=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359685 4768 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359691 4768 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359697 4768 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359703 4768 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359710 4768 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359716 4768 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359722 4768 flags.go:64] FLAG: --image-service-endpoint=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359728 4768 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359733 4768 flags.go:64] FLAG: --kube-api-burst="100"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359739 4768 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359745 4768 flags.go:64] FLAG: --kube-api-qps="50"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359751 4768 flags.go:64] FLAG: --kube-reserved=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359758 4768 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359764 4768 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359770 4768 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359776 4768 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359782 4768 flags.go:64] FLAG: --lock-file=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359789 4768 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359795 4768 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359801 4768 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359810 4768 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359817 4768 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359823 4768 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359830 4768 flags.go:64] FLAG: --logging-format="text"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359835 4768 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359842 4768 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359847 4768 flags.go:64] FLAG: --manifest-url=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359854 4768 flags.go:64] FLAG: --manifest-url-header=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359862 4768 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359869 4768 flags.go:64] FLAG: --max-open-files="1000000"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359878 4768 flags.go:64] FLAG: --max-pods="110"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359884 4768 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359891 4768 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359898 4768 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359905 4768 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359912 4768 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359920 4768 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359927 4768 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359941 4768 flags.go:64] FLAG: --node-status-max-images="50"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359948 4768 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359954 4768 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359960 4768 flags.go:64] FLAG: --pod-cidr=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359966 4768 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359976 4768 flags.go:64] FLAG: --pod-manifest-path=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359982 4768 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359990 4768 flags.go:64] FLAG: --pods-per-core="0"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.359997 4768 flags.go:64] FLAG: --port="10250"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360003 4768 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360010 4768 flags.go:64] FLAG: --provider-id=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360015 4768 flags.go:64] FLAG: --qos-reserved=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360021 4768 flags.go:64] FLAG: --read-only-port="10255"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360027 4768 flags.go:64] FLAG: --register-node="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360033 4768 flags.go:64] FLAG: --register-schedulable="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360039 4768 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360049 4768 flags.go:64] FLAG: --registry-burst="10"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360055 4768 flags.go:64] FLAG: --registry-qps="5"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360062 4768 flags.go:64] FLAG: --reserved-cpus=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360068 4768 flags.go:64] FLAG: --reserved-memory=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360075 4768 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360081 4768 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360087 4768 flags.go:64] FLAG: --rotate-certificates="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360094 4768 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360100 4768 flags.go:64] FLAG: --runonce="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360106 4768 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360112 4768 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360118 4768 flags.go:64] FLAG: --seccomp-default="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360124 4768 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360131 4768 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360137 4768 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360144 4768 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360150 4768 flags.go:64] FLAG: --storage-driver-password="root"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360156 4768 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360162 4768 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360168 4768 flags.go:64] FLAG: --storage-driver-user="root"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360175 4768 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360183 4768 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360191 4768 flags.go:64] FLAG: --system-cgroups=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360204 4768 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360216 4768 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360224 4768 flags.go:64] FLAG: --tls-cert-file=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360231 4768 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360242 4768 flags.go:64] FLAG: --tls-min-version=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360248 4768 flags.go:64] FLAG: --tls-private-key-file=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360254 4768 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360260 4768 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360266 4768 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360272 4768 flags.go:64] FLAG: --v="2"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360280 4768 flags.go:64] FLAG: --version="false"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360288 4768 flags.go:64] FLAG: --vmodule=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360295 4768 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.360301 4768 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360461 4768 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360473 4768 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360481 4768 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360488 4768 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360495 4768 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360502 4768 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360509 4768 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360516 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360523 4768 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360531 4768 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360538 4768 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360544 4768 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360552 4768 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360559 4768 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360565 4768 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360571 4768 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360577 4768 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360583 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360616 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360622 4768 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360628 4768 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360633 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360638 4768 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360643 4768 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360648 4768 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360654 4768 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360659 4768 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360664 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360669 4768 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360674 4768 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360680 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360685 4768 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360691 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360696 4768 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360701 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360706 4768 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360713 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360721 4768 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360728 4768 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360734 4768 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360746 4768 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360753 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360759 4768 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360766 4768 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360772 4768 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360778 4768 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360783 4768 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360788 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360793 4768 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360798 4768 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360805 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360810 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360816 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360821 4768 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360826 4768 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360831 4768 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360836 4768 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360843 4768 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360850 4768 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360856 4768 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360861 4768 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360867 4768 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360872 4768 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360878 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360883 4768 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360890 4768 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360897 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360905 4768 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360912 4768 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360919 4768 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.360925 4768 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.361305 4768 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.372661 4768 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.372716 4768 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372809 4768 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372827 4768 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372837 4768 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372846 4768 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372853 4768 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372859 4768 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372863 4768 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372869 4768 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372874 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372879 4768 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372884 4768 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372889 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372893 4768 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372899 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372905 4768 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372914 4768 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372921 4768 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372927 4768 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372934 4768 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372940 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372946 4768 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372951 4768 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372957 4768 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372962 4768 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372967 4768 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372972 4768 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372977 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372982 4768 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372989 4768 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.372995 4768 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373000 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373005 4768 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373010 4768 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373015 4768 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373022 4768 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373027 4768 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373032 4768 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373038 4768 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373043 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373048 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373053 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373058 4768 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373063 4768 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373068 4768 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373073 4768 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373078 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373083 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373089 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373093 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373098 4768 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373104 4768 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373109 4768 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373116 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373121 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373126 4768 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373131 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373136 4768 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373142 4768 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373148 4768 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373154 4768 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373159 4768 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373164 4768 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373169 4768 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373174 4768 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373179 4768 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373184 4768 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373189 4768 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373194 4768 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373198 4768 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373204 4768 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373210 4768 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.373220 4768 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373385 4768 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373394 4768 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373399 4768 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373406 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373411 4768 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373416 4768 feature_gate.go:330] unrecognized feature gate: Example
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373421 4768 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373426 4768 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373431 4768 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373437 4768 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373443 4768 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373447 4768 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373453 4768 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373460 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373467 4768 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373472 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373477 4768 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373482 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373487 4768 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373492 4768 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373497 4768 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373503 4768 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373508 4768 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373513 4768 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373518 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373523 4768 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373527 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373535 4768 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373540 4768 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373545 4768 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373550 4768 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373554 4768 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373559 4768 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373564 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373570 4768 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373575 4768 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373580 4768 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373585 4768 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373609 4768 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373615 4768 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373620 4768 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373625 4768 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373630 4768 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373635 4768 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373640 4768 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373646 4768 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373653 4768 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373658 4768 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373664 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373670 4768 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373675 4768 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373681 4768 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373686 4768 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373691 4768 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373696 4768 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373701 4768 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373707 4768 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373711 4768 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373716 4768 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373722 4768 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373726 4768 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373732 4768 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373737 4768 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373742 4768 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373747 4768 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373752 4768 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373757 4768 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373762 4768 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373768 4768 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373773 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.373779 4768 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.373789 4768 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.374035 4768 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.381544 4768 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.381753 4768 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.382586 4768 server.go:997] "Starting client certificate rotation"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.382647 4768 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.382870 4768 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-30 22:20:55.436388254 +0000 UTC
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.383009 4768 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 654h2m32.053383633s for next certificate rotation
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.390982 4768 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.393617 4768 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.403714 4768 log.go:25] "Validated CRI v1 runtime API"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.422411 4768 log.go:25] "Validated CRI v1 image API"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.424876 4768 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.428421 4768 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-03-16-13-44-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.428482 4768 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.451113 4768 manager.go:217] Machine: {Timestamp:2025-12-03 16:18:23.449038461 +0000 UTC m=+0.368374964 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:507ff355-2b46-4e3a-9065-268c99e59f9e BootID:f04c821a-4bbc-4c51-b87d-ffb4482e494c Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:38:ac:28 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:38:ac:28 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:54:2b:f1 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:8c:b1:06 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:22:bf:ef Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:18:6a:0d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:06:9c:bd:26:e2:35 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:f6:cd:52:b4:19:5b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.451532 4768 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.451940 4768 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.455179 4768 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456135 4768 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456215 4768 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456486 4768 topology_manager.go:138] "Creating topology manager with none policy"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456501 4768 container_manager_linux.go:303] "Creating device plugin manager"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456798 4768 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.456846 4768 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.457199 4768 state_mem.go:36] "Initialized new in-memory state store"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.457308 4768 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.457943 4768 kubelet.go:418] "Attempting to sync node with API server"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.457968 4768 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.457994 4768 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.458010 4768 kubelet.go:324] "Adding apiserver pod source"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.458024 4768 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.460614 4768 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.461021 4768 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.462370 4768 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463095 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463123 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463132 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463140 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463152 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463162 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463170 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463182 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463198 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463209 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463261 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463269 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.463678 4768 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.464134 4768 server.go:1280] "Started kubelet"
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.464151 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.464337 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.464443 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.464510 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.464858 4768 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.464934 4768 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.464924 4768 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 03 16:18:23 crc systemd[1]: Started Kubernetes Kubelet.
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.467846 4768 server.go:460] "Adding debug handlers to kubelet server"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468059 4768 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468124 4768 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468174 4768 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468300 4768 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 09:51:24.976708188 +0000 UTC
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468356 4768 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 833h33m1.508354131s for next certificate rotation
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468422 4768 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468430 4768 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.468638 4768 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.468587 4768 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.469962 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.470258 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.470323 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="200ms"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.470460 4768 factory.go:55] Registering systemd factory
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.470569 4768 factory.go:221] Registration of the systemd container factory successfully
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471021 4768 factory.go:153] Registering CRI-O factory
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471057 4768 factory.go:221] Registration of the crio container factory successfully
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471150 4768 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471176 4768 factory.go:103] Registering Raw factory
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471191 4768 manager.go:1196] Started watching for new ooms in manager
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.470957 4768 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187dc0e16f270ca1 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:18:23.464107169 +0000 UTC m=+0.383443592,LastTimestamp:2025-12-03 16:18:23.464107169 +0000 UTC m=+0.383443592,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.471772 4768 manager.go:319] Starting recovery of all containers
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.483794 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.483921 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.483957 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.483988 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484019 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484047 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484076 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484106 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484144 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484174 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484202 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484230 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484259 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484297 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484330 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484363 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484392 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484432 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484464 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484494 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484534 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484563 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484631 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484667 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484699 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484755 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484793 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484829 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484858 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484886 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484917 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484948 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.484983 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485014 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485042 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485076 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485107 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485137 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485165 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485196 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485225 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485252 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485281 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485316 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485347 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485376 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485405 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485439 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485472 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485509 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485538 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485578 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485666 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485707 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485739 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485771 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485799 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485831 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485861 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485891 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.485995 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486032 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486066 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486097 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486126 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486167 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486197 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486228 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486261 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486305 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486335 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486364 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486394 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486423 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486454 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486481 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486511 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486541 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486568 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486636 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486671 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486705 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486733 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486766 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486795 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486824 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486853 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486883 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486915 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486946 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.486990 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487019 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487063 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487091 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487120 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487148 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487178 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487211 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487240 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487270 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487297 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487325 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487357 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487385 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487433 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487470 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487503 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487535 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487565 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487632 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487669 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487702 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487736 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487770 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487801 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487880 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487909 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487937 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.487966 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488014 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488045 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488077 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488109 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488141 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488174 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488202 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488231 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488259 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488286 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488320 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488351 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488383 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488413 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488442 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488472 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803"
volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488504 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488531 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488558 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488588 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488651 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488681 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488714 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488742 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488771 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488798 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488830 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488859 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488895 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488930 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488959 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.488987 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489013 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489041 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489070 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489100 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489130 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489160 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489190 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489233 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489264 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489294 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489327 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489356 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489384 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489414 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489445 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489471 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489501 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489531 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489559 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489587 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489655 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489684 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489717 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489746 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489777 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489806 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489856 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489888 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489921 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.489972 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490002 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490034 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490064 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490094 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490122 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490152 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490182 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490211 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490239 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490267 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.490299 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491334 4768 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491397 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491433 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491462 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491491 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491519 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491548 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491577 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491645 4768 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491677 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491706 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491735 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491762 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491795 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491827 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491856 4768 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491884 4768 reconstruct.go:97] "Volume reconstruction finished" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.491902 4768 reconciler.go:26] "Reconciler: start to sync state" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.508655 4768 manager.go:324] Recovery completed Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.518714 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.521130 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.521270 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.521293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: 
I1203 16:18:23.522398 4768 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.522425 4768 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.522459 4768 state_mem.go:36] "Initialized new in-memory state store" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.526208 4768 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.530225 4768 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.530287 4768 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.530327 4768 kubelet.go:2335] "Starting kubelet main sync loop" Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.530395 4768 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 03 16:18:23 crc kubenswrapper[4768]: W1203 16:18:23.532704 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.532856 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.536836 4768 policy_none.go:49] "None policy: Start" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.539323 4768 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.539367 4768 state_mem.go:35] "Initializing new in-memory state store" Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.569185 4768 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.603896 4768 manager.go:334] "Starting Device Plugin manager" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.604086 4768 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.604116 4768 server.go:79] "Starting device plugin registration server" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.604881 4768 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.604909 4768 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.605293 4768 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.605496 4768 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.605520 4768 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 03 
16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.614908 4768 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.630965 4768 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.631140 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.635567 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.635627 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.635641 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.635804 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636377 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636487 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636557 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636578 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636588 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636717 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.636989 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.637067 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.637715 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.637765 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.637783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.637972 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638088 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638140 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638146 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638257 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638292 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638914 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638947 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.638962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.639082 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.639211 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.639260 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.639865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.639966 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640056 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640268 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640285 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640321 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640348 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640353 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640629 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.640678 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.641564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.641612 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.641628 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.672179 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="400ms" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694405 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694449 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694483 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694508 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694536 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694563 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694587 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694913 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.694960 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695036 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695094 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695135 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695170 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695211 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.695244 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.705191 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.706661 4768 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.706717 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.706741 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.706787 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.707491 4768 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.38:6443: connect: connection refused" node="crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796082 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796157 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796177 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796197 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796216 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796232 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796250 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796268 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796289 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796309 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796331 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796345 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796362 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796385 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796415 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796442 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796514 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796549 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796557 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796814 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796626 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796643 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796651 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796691 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796691 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796723 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796733 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796728 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796617 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.796902 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.907971 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.909632 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.909689 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.909702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.909740 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: E1203 16:18:23.910336 4768 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.38:6443: connect: connection refused" node="crc"
Dec 03 16:18:23 crc kubenswrapper[4768]: I1203 16:18:23.972446 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.004352 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.007016 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-a92e1fddda0dfa97152a25880a5abd9f1f696b58d82c091f093d30b8c557e0f3 WatchSource:0}: Error finding container a92e1fddda0dfa97152a25880a5abd9f1f696b58d82c091f093d30b8c557e0f3: Status 404 returned error can't find the container with id a92e1fddda0dfa97152a25880a5abd9f1f696b58d82c091f093d30b8c557e0f3
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.012951 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.026254 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-5ff79655773dd23dcec0e88acba003612c62f15cefd769aae49aaa2004301de0 WatchSource:0}: Error finding container 5ff79655773dd23dcec0e88acba003612c62f15cefd769aae49aaa2004301de0: Status 404 returned error can't find the container with id 5ff79655773dd23dcec0e88acba003612c62f15cefd769aae49aaa2004301de0
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.027420 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-71a00281a705461fd605a20f36854353959dab6c46ee1dfd00847de7be6ed400 WatchSource:0}: Error finding container 71a00281a705461fd605a20f36854353959dab6c46ee1dfd00847de7be6ed400: Status 404 returned error can't find the container with id 71a00281a705461fd605a20f36854353959dab6c46ee1dfd00847de7be6ed400
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.035529 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.041747 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.058736 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-e68b469486796ec696d2762a8972ad1396884921e5f8e426c34cb2c30f9a0071 WatchSource:0}: Error finding container e68b469486796ec696d2762a8972ad1396884921e5f8e426c34cb2c30f9a0071: Status 404 returned error can't find the container with id e68b469486796ec696d2762a8972ad1396884921e5f8e426c34cb2c30f9a0071
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.065530 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-317f3f7ee24c892159583d9a0095510f1d1435da51e3d992c698ad7aa1d693a5 WatchSource:0}: Error finding container 317f3f7ee24c892159583d9a0095510f1d1435da51e3d992c698ad7aa1d693a5: Status 404 returned error can't find the container with id 317f3f7ee24c892159583d9a0095510f1d1435da51e3d992c698ad7aa1d693a5
Dec 03 16:18:24 crc kubenswrapper[4768]: E1203 16:18:24.074337 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="800ms"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.311033 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.313100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.313166 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.313178 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.313211 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: E1203 16:18:24.313883 4768 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.38:6443: connect: connection refused" node="crc"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.465865 4768 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.536800 4768 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48" exitCode=0
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.536920 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.537088 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"317f3f7ee24c892159583d9a0095510f1d1435da51e3d992c698ad7aa1d693a5"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.537287 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.538888 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.538926 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.538939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.539077 4768 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51" exitCode=0
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.539205 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.539284 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"e68b469486796ec696d2762a8972ad1396884921e5f8e426c34cb2c30f9a0071"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.539438 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.540572 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.540669 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.540690 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.542878 4768 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7" exitCode=0
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.542990 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.543103 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"5ff79655773dd23dcec0e88acba003612c62f15cefd769aae49aaa2004301de0"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.543266 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.544507 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.544544 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.544557 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.547818 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.547869 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"71a00281a705461fd605a20f36854353959dab6c46ee1dfd00847de7be6ed400"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.550619 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f" exitCode=0
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.550656 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.550677 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a92e1fddda0dfa97152a25880a5abd9f1f696b58d82c091f093d30b8c557e0f3"}
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.550801 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.552113 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.552157 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.552176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.555398 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.556294 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.556329 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:24 crc kubenswrapper[4768]: I1203 16:18:24.556343 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.665746 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:24 crc kubenswrapper[4768]: E1203 16:18:24.665835 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:24 crc kubenswrapper[4768]: W1203 16:18:24.740479 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:24 crc kubenswrapper[4768]: E1203 16:18:24.740581 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:24 crc kubenswrapper[4768]: E1203 16:18:24.876214 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="1.6s"
Dec 03 16:18:25 crc kubenswrapper[4768]: W1203 16:18:25.003573 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:25 crc kubenswrapper[4768]: E1203 16:18:25.003730 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.114794 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.116308 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.116339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.116348 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.116370 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 16:18:25 crc kubenswrapper[4768]: E1203 16:18:25.116770 4768 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.38:6443: connect: connection refused" node="crc"
Dec 03 16:18:25 crc kubenswrapper[4768]: W1203 16:18:25.130419 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:25 crc kubenswrapper[4768]: E1203 16:18:25.130529 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.38:6443: connect: connection refused" logger="UnhandledError"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.466963 4768 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.38:6443: connect: connection refused
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.556648 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.556707 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.558375 4768 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589" exitCode=0
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.558435 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.558582 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.559694 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.559739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.559751 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.563018 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.563103 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.563127 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.563246 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.564186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.564212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.564224 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.565506 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.565581 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.566368 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.566408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.566425 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.569535 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.569566 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.569584 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a"}
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.569711 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.570690 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.570735 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:25 crc kubenswrapper[4768]: I1203 16:18:25.570748 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.576299 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2"}
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.576354 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde"}
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.576367 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929"}
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.576516 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.581623 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.581787 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.581816 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.586040 4768 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41" exitCode=0
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.586147 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41"}
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.586195 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.586429 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588234 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588299 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588319 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588349 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.588411 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.717480 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.719213 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.719278 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.719289 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:26 crc kubenswrapper[4768]: I1203 16:18:26.719338 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.010162 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.369679 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.595825 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"88808a65a8983f8cac59cfa2db8e097dc8c2c0395b3121b4dbbc89fbbff2cf3a"}
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.595903 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8e54bdff646b76809e1fbaac3e0881ac21311b6347270117d14113a1f3ca3077"}
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.595931 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"af4a80cf1721650ceda0bde2feba84dfd03f274e019b9d0f1348f7ff92f55f0f"}
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.596044 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.596064 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.596203 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.597618 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.597659 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.597673 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.598969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.599055 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.599125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:27 crc kubenswrapper[4768]: I1203 16:18:27.672418 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.604839 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"476fd31133a2c7f6ff99562b16dec4ece22f94cdd5d0c5c1e138ae4d65c862a1"}
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.604933 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f8a57fe486321b00bf04c4d4e7f2c6ce0d3123a70fd386d5bae43d594bb76591"}
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.605028 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.605053 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.606112 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.606174 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.606882 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.606976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.606999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607437 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607448 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607445 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607491 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:28 crc kubenswrapper[4768]: I1203 16:18:28.607514 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:29 crc kubenswrapper[4768]: I1203 16:18:29.528013 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc"
Dec 03 16:18:29 crc kubenswrapper[4768]: I1203 16:18:29.608792 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:29 crc kubenswrapper[4768]: I1203 16:18:29.610250 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:29 crc kubenswrapper[4768]: I1203 16:18:29.610312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:29 crc kubenswrapper[4768]: I1203 16:18:29.610809 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.011067 4768 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.011208 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.612205 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.613701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.613781 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.613802 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.697081 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.697352 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.699245 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.699306 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:30 crc kubenswrapper[4768]: I1203 16:18:30.699325 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:31 crc kubenswrapper[4768]: I1203 16:18:31.362450 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:31 crc kubenswrapper[4768]: I1203 16:18:31.614576 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:31 crc kubenswrapper[4768]: I1203 16:18:31.616486 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:31 crc kubenswrapper[4768]: I1203 16:18:31.616547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:31 crc kubenswrapper[4768]: I1203 16:18:31.616557 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.367614 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.367845 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.369304 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.369336 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.369346 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.373166 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.593515 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.593867 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.595579 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.595629 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.595640 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.617584 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.619040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.619293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:32 crc kubenswrapper[4768]: I1203 16:18:32.619531 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.589516 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.589845 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.591311 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.591351 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.591364 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:33 crc kubenswrapper[4768]: E1203 16:18:33.615089 4768 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.865373 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.865574 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.867159 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.867194 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:33 crc kubenswrapper[4768]: I1203 16:18:33.867211 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.468068 4768 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout
Dec 03 16:18:36 crc kubenswrapper[4768]: E1203 16:18:36.477509 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s"
Dec 03 16:18:36 crc kubenswrapper[4768]: W1203 16:18:36.531082 4768 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.531226 4768 trace.go:236] Trace[1861528856]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:18:26.530) (total time: 10001ms):
Dec 03 16:18:36 crc kubenswrapper[4768]: Trace[1861528856]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (16:18:36.531)
Dec 03 16:18:36 crc kubenswrapper[4768]: Trace[1861528856]: [10.001128688s] [10.001128688s] END
Dec 03 16:18:36 crc kubenswrapper[4768]: E1203 16:18:36.531265 4768 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError"
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.728375 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.728461 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.741561 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403}
Dec 03 16:18:36 crc kubenswrapper[4768]: I1203 16:18:36.741677 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.500551 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.500697 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.820910 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.821153 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.822671 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.822749 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.822771 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:39 crc kubenswrapper[4768]: I1203 16:18:39.843913 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.011250 4768 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.011350 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.640936 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.642460 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.642533 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.642558 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.698006 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 03 16:18:40 crc kubenswrapper[4768]: I1203 16:18:40.698075 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.371896 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.372229 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.373184 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.373320 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.373917 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.373976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.373995 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.379631 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.379648 4768 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.469550 4768 apiserver.go:52] "Watching apiserver"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.480161 4768 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.480832 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.481358 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.481491 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.481874 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.482014 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.482583 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.482712 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.482733 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.482725 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.482881 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.486350 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.486466 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.486525 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.488559 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.488580 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.488997 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.489064 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.489527 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.489669 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.530070 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.547171 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.564721 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.569633 4768 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.583145 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.598118 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.615814 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.632338 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.645326 4768 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.645426 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.662465 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.715118 4768 trace.go:236] Trace[36828744]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:18:27.718) (total time: 13996ms):
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[36828744]: ---"Objects listed" error: 13996ms (16:18:41.714)
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[36828744]: [13.996545852s] [13.996545852s] END
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.715166 4768 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.721382 4768 trace.go:236] Trace[502845902]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:18:28.222) (total time: 13498ms):
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[502845902]: ---"Objects listed" error: 13498ms (16:18:41.721)
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[502845902]: [13.498906129s] [13.498906129s] END
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.721430 4768 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.721729 4768 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.721962 4768 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.724658 4768 trace.go:236] Trace[1232408343]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Dec-2025 16:18:27.927) (total time: 13797ms):
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[1232408343]: ---"Objects listed" error: 13797ms (16:18:41.724)
Dec 03 16:18:41 crc kubenswrapper[4768]: Trace[1232408343]: [13.797500784s] [13.797500784s] END
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.724699 4768 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822579 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822647 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822677 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822702 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822729 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822757 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822783 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822805 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822831 4768 reconciler_common.go:159]
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822854 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822884 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822912 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822935 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822960 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.822981 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823003 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823025 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823050 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823073 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823064 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823103 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823131 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823155 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823175 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823203 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823226 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823248 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823275 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc 
kubenswrapper[4768]: I1203 16:18:41.823298 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823380 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823408 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823432 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823481 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823508 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823532 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823560 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823588 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823633 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod 
\"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823657 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823680 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823703 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823724 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823753 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823815 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823839 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823866 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823887 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823909 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823931 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823954 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823974 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823995 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824024 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824052 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824077 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824168 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824195 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824221 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: 
\"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824244 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824268 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824295 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824355 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824380 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824404 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824427 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824453 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824479 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824504 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824535 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824560 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824587 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823088 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823100 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823424 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823643 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823755 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.823911 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824004 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824000 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825547 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825546 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824128 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824208 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824243 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824369 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824427 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824411 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824476 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824548 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824637 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.824651 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:18:42.324615808 +0000 UTC m=+19.243952231 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825810 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825853 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825860 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825871 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825900 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824871 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825923 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824923 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824964 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825031 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825075 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825211 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825417 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825422 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824078 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826061 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826175 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826286 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.824818 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826591 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826677 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826826 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826928 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.826943 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.827011 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.827066 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.827417 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.827654 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.827785 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.828548 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.828716 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.825935 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830304 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830348 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830347 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830384 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830419 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830452 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830492 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830524 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830570 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830660 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830703 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830742 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830784 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: 
\"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830817 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830850 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830884 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830916 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830944 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.830956 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831046 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831096 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831132 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831168 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831146 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831199 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831362 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831416 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831539 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831579 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831651 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831688 4768 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831725 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831767 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831804 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831840 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831878 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831916 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831956 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831991 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832029 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 03 
16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832064 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832104 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832138 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832171 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832208 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832241 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832276 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832313 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832349 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832384 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832421 4768 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832459 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832496 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832530 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832568 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832649 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832707 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832748 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832784 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832817 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" 
(UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832853 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832890 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832926 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832962 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833000 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833035 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833073 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833108 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833141 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833180 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833404 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833442 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833483 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833845 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833900 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833937 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833975 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834010 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834048 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834081 4768 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834119 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834158 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834192 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834226 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834261 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834488 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834522 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834574 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834639 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834676 4768 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834713 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834746 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834782 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834817 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834861 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834898 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834932 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834969 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835004 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835042 4768 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835080 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835117 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835150 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835184 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835219 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835257 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835291 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835326 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835360 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835404 4768 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835467 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835537 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835576 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835636 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835673 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835707 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835743 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835779 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 
16:18:41.835817 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835856 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835898 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835936 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835978 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836015 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836050 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836125 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836198 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836239 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: 
\"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836281 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837705 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837785 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837849 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837904 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837955 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838634 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838700 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: 
\"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831541 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831572 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831699 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831820 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831910 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.831998 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832444 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832588 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832773 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.832916 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833395 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.833826 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838942 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834118 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834501 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.834582 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835262 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835393 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835588 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835614 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835690 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.835885 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836327 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.836477 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837308 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837641 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837699 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837816 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.837852 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838142 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838178 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.838727 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.839377 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:42.339233597 +0000 UTC m=+19.258570020 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.839434 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840216 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840460 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840456 4768 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840481 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840623 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840787 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.840907 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841039 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.838871 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841548 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841700 4768 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841719 4768 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841737 4768 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841750 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841766 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841781 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841795 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841807 4768 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841821 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841835 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841847 4768 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841862 4768 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841875 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841889 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841901 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841914 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841926 4768 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841937 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841949 4768 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841961 4768 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841974 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.841987 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: 
\"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842000 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842013 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842026 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842041 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842021 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842848 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842907 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.842932 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843083 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843081 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843102 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843128 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843313 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843624 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843830 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843968 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.843983 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844042 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844218 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844528 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844583 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844688 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844917 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.844985 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845229 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845274 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845387 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845444 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845542 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845684 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.845844 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.846044 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.846381 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.846987 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.847363 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.847669 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.847856 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.848036 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.848262 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.848499 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.848816 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849054 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.849078 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849272 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849293 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849487 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849647 4768 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.849997 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.850673 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.851094 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852071 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852234 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.850351 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:42.349366959 +0000 UTC m=+19.268703482 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852505 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852587 4768 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852671 4768 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852743 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852808 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.852877 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853301 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853408 4768 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853476 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853553 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853657 4768 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853734 4768 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 
16:18:41.853806 4768 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853873 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.853936 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854003 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854076 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854280 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854322 4768 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854346 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854369 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854390 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854410 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854429 4768 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854448 4768 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854466 4768 reconciler_common.go:293] 
"Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854487 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854507 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854527 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854551 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854571 4768 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854641 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854659 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854681 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854700 4768 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854719 4768 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854737 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854756 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854777 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854795 4768 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854816 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854835 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854856 4768 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854875 4768 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854894 4768 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854913 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854932 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854952 4768 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854971 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.854990 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855010 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855028 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855046 4768 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855064 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855083 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855103 4768 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855125 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855143 4768 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855161 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855180 4768 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855198 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855217 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855236 4768 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855255 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855274 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.855293 4768 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.856100 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.856623 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.857144 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.857761 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.857930 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.858412 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.859427 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.859675 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.859820 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.859930 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.859963 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.860330 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.860808 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.861064 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.861656 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.862240 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:42.362217725 +0000 UTC m=+19.281554148 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.861061 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.861658 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.860963 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.860904 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.862630 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.861153 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.861157 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.862030 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.862782 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:41 crc kubenswrapper[4768]: E1203 16:18:41.862999 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:42.362991034 +0000 UTC m=+19.282327457 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864231 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864284 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864330 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864410 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864640 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.864858 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.865105 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.865663 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.865971 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.866058 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.866248 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.868638 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.870657 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.870728 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.870904 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.871180 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.871332 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.872571 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.873137 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.873215 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.873573 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.874128 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.873400 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.875751 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.876146 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.876192 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.876354 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.878520 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.878728 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.878828 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.878529 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). 
InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.880876 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.881384 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.882570 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.883525 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.883987 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.884022 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.884875 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.885133 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.890307 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.891564 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.891962 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.898803 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.915559 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.916637 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957427 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957506 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957636 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957672 4768 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957691 4768 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957708 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957724 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957740 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957756 4768 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957774 4768 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957791 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957806 4768 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957822 4768 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957837 4768 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957852 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957868 4768 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957883 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957898 4768 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957915 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957933 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957950 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957967 4768 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.957985 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958002 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958018 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958035 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958052 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958068 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958086 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958103 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958121 4768 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958138 4768 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958155 4768 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958171 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958188 4768 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958204 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958220 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958236 4768 reconciler_common.go:293] "Volume detached 
for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958254 4768 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958270 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958288 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958306 4768 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958323 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958338 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958353 4768 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958369 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958384 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958399 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958414 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958429 4768 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958445 4768 reconciler_common.go:293] "Volume detached for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958461 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958476 4768 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958492 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958508 4768 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958523 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958539 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958554 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958570 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958587 4768 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958629 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958644 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958658 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958673 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958688 4768 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958704 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958719 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958737 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958752 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958766 4768 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958781 4768 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958796 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958813 4768 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958829 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958845 4768 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958860 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958876 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958891 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958909 4768 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958924 4768 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958941 4768 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958957 4768 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958972 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.958988 4768 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959003 4768 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959025 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959043 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959059 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959075 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959091 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959107 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959123 4768 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959139 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959152 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959168 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959184 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959200 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959215 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959231 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959247 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959287 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959303 4768 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959318 4768 reconciler_common.go:293] 
"Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959387 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:41 crc kubenswrapper[4768]: I1203 16:18:41.959583 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.104423 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.117067 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.129415 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 03 16:18:42 crc kubenswrapper[4768]: W1203 16:18:42.146838 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-140a2b51c3b2862cfdd96e033bad364793fc09ed2c8343c28acac0a92d36bc6f WatchSource:0}: Error finding container 140a2b51c3b2862cfdd96e033bad364793fc09ed2c8343c28acac0a92d36bc6f: Status 404 returned error can't find the container with id 140a2b51c3b2862cfdd96e033bad364793fc09ed2c8343c28acac0a92d36bc6f Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.362645 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.362779 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.362823 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.362862 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363009 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:18:43.362943222 +0000 UTC m=+20.282279685 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363023 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363076 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363143 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:43.363129356 +0000 UTC m=+20.282465809 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363025 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363221 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:43.363183207 +0000 UTC m=+20.282519650 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363227 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363253 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.363337 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:43.36331272 +0000 UTC m=+20.282649173 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.463911 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.464200 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.464264 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.464288 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:42 crc kubenswrapper[4768]: E1203 16:18:42.464390 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:43.464362011 +0000 UTC m=+20.383698474 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.650176 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"140a2b51c3b2862cfdd96e033bad364793fc09ed2c8343c28acac0a92d36bc6f"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.652959 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.653047 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.653071 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d3dff7c978a153530f9c93989d319aeb27b986429362646647019b25593f1194"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.655477 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.655534 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"fbf781adc4d0dcf1f26cf53f78953172220a8680a62bda8061196480c8bed31e"} Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.664680 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.679503 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.695234 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.712636 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.725421 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.743357 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.757722 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.772266 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.784299 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.795156 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.807185 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.820820 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",
\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.833676 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.844556 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers 
with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 03 16:18:42 crc kubenswrapper[4768]: I1203 16:18:42.857822 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:42Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.373069 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.373180 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.373212 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.373237 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373310 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:18:45.373287356 +0000 UTC m=+22.292623779 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373384 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373419 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:45.373412449 +0000 UTC m=+22.292748862 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373540 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373638 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373674 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373800 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:45.373758247 +0000 UTC m=+22.293094720 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373904 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.373942 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:45.373933531 +0000 UTC m=+22.293269954 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.474370 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.474583 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.474633 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.474650 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.474715 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:45.474694855 +0000 UTC m=+22.394031278 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.531011 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.531143 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.531454 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.531501 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.531545 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:43 crc kubenswrapper[4768]: E1203 16:18:43.531591 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.537501 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.538586 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.540965 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.542222 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.544247 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.545287 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.546472 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.548502 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.549967 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.551782 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.552469 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.553632 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.554166 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.554754 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.555300 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.555859 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.556504 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.556970 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.557308 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.557749 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.558466 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.559102 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" 
path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.559816 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.560372 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.561187 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.563465 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.565040 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.566648 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.567657 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.568827 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.569867 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.570863 4768 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.571075 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.575627 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.576758 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.577418 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" 
path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.578689 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.579090 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.580068 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.580904 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.581872 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.582855 4768 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.583585 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.587139 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.588495 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.589396 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.590585 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.591179 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.592062 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.592814 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.593814 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.594273 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.594770 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.595684 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.596249 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.597120 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.599255 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.618972 4768 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.636216 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.657454 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.673907 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.876238 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.898752 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.906734 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.927476 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.941229 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.961042 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"n
ame\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.976932 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:43 crc kubenswrapper[4768]: I1203 16:18:43.991424 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.006246 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.922796 4768 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.925697 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.925797 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.925822 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.925937 4768 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.939683 4768 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.940067 4768 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.941964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.942039 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.942062 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.942090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.942113 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:44Z","lastTransitionTime":"2025-12-03T16:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:44 crc kubenswrapper[4768]: E1203 16:18:44.980697 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:44Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.994388 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.994443 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.994457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.994480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:44 crc kubenswrapper[4768]: I1203 16:18:44.994495 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:44Z","lastTransitionTime":"2025-12-03T16:18:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.015781 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.021065 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.021122 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.021133 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.021160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.021173 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.041901 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.052692 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.052753 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.052771 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.052798 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.052816 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.072872 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.078646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.078698 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.078716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.078743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.078762 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.097354 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.097575 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.100153 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
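The status patch above is rejected because the apiserver cannot call the node.network-node-identity.openshift.io admission webhook: the endpoint at 127.0.0.1:9743 presents a serving certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2025-12-03T16:18:45Z. The kubelet retries the patch a fixed number of times (nodeStatusUpdateRetry, 5 in the upstream kubelet) before logging the "exceeds retry count" line. A minimal sketch for confirming the expiry from the node; the endpoint is taken from the log, and the third-party cryptography package is an assumption of this sketch:

```python
# Sketch: fetch the webhook's serving certificate and print its validity
# window. ssl.get_server_certificate performs a handshake without chain
# validation, so it works even when the certificate is already expired.
import ssl
from cryptography import x509

pem = ssl.get_server_certificate(("127.0.0.1", 9743))  # endpoint from the log
cert = x509.load_pem_x509_certificate(pem.encode())
print("subject:  ", cert.subject.rfc4514_string())
print("notBefore:", cert.not_valid_before)
print("notAfter: ", cert.not_valid_after)  # log shows 2025-08-24T17:21:41Z
```

Any notAfter earlier than the node clock reproduces the x509 "certificate has expired or is not yet valid" failure seen in every status patch here.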
event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.100210 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.100229 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.100254 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.100272 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.203290 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.203352 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.203367 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.203391 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.203406 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.306836 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.306888 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.306899 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.306920 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.306935 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.394729 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.394854 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.394920 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:18:49.394882328 +0000 UTC m=+26.314218781 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.394981 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.395018 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395077 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:49.395032431 +0000 UTC m=+26.314368884 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.395103 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395271 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395305 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395328 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395321 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395390 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:49.39537457 +0000 UTC m=+26.314711023 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.395465 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:49.395431901 +0000 UTC m=+26.314768364 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.409954 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.409994 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.410003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.410022 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.410035 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.496521 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.496827 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.496876 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.496892 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.496978 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:49.496950623 +0000 UTC m=+26.416287256 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.512830 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.512876 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.512898 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.512918 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.512930 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.531427 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.531452 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.531419 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.531556 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.531777 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:45 crc kubenswrapper[4768]: E1203 16:18:45.532020 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.616287 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.616340 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.616350 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.616371 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.616382 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.719521 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.719647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.719675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.719709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.719732 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.822709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.822772 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.822785 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.822809 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.822827 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.926493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.926552 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.926564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.926586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:45 crc kubenswrapper[4768]: I1203 16:18:45.926621 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:45Z","lastTransitionTime":"2025-12-03T16:18:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.030376 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.030450 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.030473 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.030509 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.030530 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.133385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.133467 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.133487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.133519 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.133541 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.237561 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.237635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.237646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.237668 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.237682 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.340297 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.340710 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.340815 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.340921 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.341013 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.444408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.444817 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.444911 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.445033 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.445125 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.547856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.548127 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.548201 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.548268 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.548325 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.650558 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.650613 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.650624 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.650640 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.650653 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.676884 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.707404 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"
kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.727859 4768 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.753116 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.753154 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.753162 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.753176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.753187 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.768813 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.791723 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-vgj7g"] Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.792144 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: W1203 16:18:46.798199 4768 reflector.go:561] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": failed to list *v1.Secret: secrets "node-ca-dockercfg-4777p" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 03 16:18:46 crc kubenswrapper[4768]: W1203 16:18:46.798217 4768 reflector.go:561] object-"openshift-image-registry"/"image-registry-certificates": failed to list *v1.ConfigMap: configmaps "image-registry-certificates" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 03 16:18:46 crc kubenswrapper[4768]: E1203 16:18:46.798250 4768 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"node-ca-dockercfg-4777p\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"node-ca-dockercfg-4777p\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:18:46 crc kubenswrapper[4768]: E1203 16:18:46.798266 4768 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"image-registry-certificates\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"image-registry-certificates\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.798662 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-64w5t"] Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.799113 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: W1203 16:18:46.799545 4768 reflector.go:561] object-"openshift-image-registry"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 03 16:18:46 crc kubenswrapper[4768]: E1203 16:18:46.799575 4768 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:18:46 crc kubenswrapper[4768]: W1203 16:18:46.799587 4768 reflector.go:561] object-"openshift-image-registry"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 03 16:18:46 crc kubenswrapper[4768]: E1203 16:18:46.799658 4768 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.801636 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.803943 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.804784 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.811623 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c8b9d71-3aeb-428c-b61a-e984cce08f37-host\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.811764 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76tfc\" (UniqueName: \"kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.811887 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l8ps\" (UniqueName: \"kubernetes.io/projected/9b155707-262e-482b-92c7-e097de0ff1d0-kube-api-access-8l8ps\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 
03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.811993 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c8b9d71-3aeb-428c-b61a-e984cce08f37-serviceca\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.812126 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9b155707-262e-482b-92c7-e097de0ff1d0-hosts-file\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.825080 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.847759 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.855668 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.855792 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.855864 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.855933 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.856000 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.864067 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.886935 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.900539 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913439 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76tfc\" (UniqueName: \"kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913484 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l8ps\" (UniqueName: \"kubernetes.io/projected/9b155707-262e-482b-92c7-e097de0ff1d0-kube-api-access-8l8ps\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913504 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9b155707-262e-482b-92c7-e097de0ff1d0-hosts-file\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913525 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c8b9d71-3aeb-428c-b61a-e984cce08f37-serviceca\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913578 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c8b9d71-3aeb-428c-b61a-e984cce08f37-host\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913674 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5c8b9d71-3aeb-428c-b61a-e984cce08f37-host\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.913782 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/9b155707-262e-482b-92c7-e097de0ff1d0-hosts-file\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: 
I1203 16:18:46.916470 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.930755 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.933190 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l8ps\" (UniqueName: \"kubernetes.io/projected/9b155707-262e-482b-92c7-e097de0ff1d0-kube-api-access-8l8ps\") pod \"node-resolver-64w5t\" (UID: \"9b155707-262e-482b-92c7-e097de0ff1d0\") " pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.943302 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.953802 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.958931 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.958977 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.958990 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.959008 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.959023 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:46Z","lastTransitionTime":"2025-12-03T16:18:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.971948 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.986655 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:46 crc kubenswrapper[4768]: I1203 16:18:46.999049 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.012437 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.015007 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.020898 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.027828 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.041977 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.056347 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.061551 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.061670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.061741 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.061814 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.061884 4768 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.067638 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.082015 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.097816 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.111772 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-64w5t" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.113795 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"reso
urce-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: W1203 16:18:47.124162 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9b155707_262e_482b_92c7_e097de0ff1d0.slice/crio-02a82fc6f7f02f9650d7a0df6a99d79f1fe87b88fb6052ccacff10a1f0601e0b WatchSource:0}: Error finding container 02a82fc6f7f02f9650d7a0df6a99d79f1fe87b88fb6052ccacff10a1f0601e0b: Status 404 returned error can't find the container with id 02a82fc6f7f02f9650d7a0df6a99d79f1fe87b88fb6052ccacff10a1f0601e0b Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.127436 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.146446 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.160113 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.164071 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.164104 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.164114 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.164132 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.164144 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.180386 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.200205 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.266776 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.266821 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.266831 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.266848 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.266859 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.370016 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.370064 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.370075 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.370096 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.370108 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.472882 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.472929 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.472942 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.472962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.472974 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.530970 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.531014 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.531154 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.531190 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.531321 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.531413 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.575720 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.575777 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.575790 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.575811 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.575825 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.584764 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5z68m"] Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.586070 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2htqq"] Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.586270 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.586824 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-g94rv"] Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.587052 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.587147 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.587800 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8d4dq"] Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.588300 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.590831 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.590946 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.590992 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.591823 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.592293 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.592444 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.592885 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.593066 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.598840 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.599061 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602195 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602364 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602433 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602543 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602746 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602804 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602867 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 03 
16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.602980 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.603099 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619665 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-multus\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619743 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-multus-certs\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619776 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-system-cni-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619805 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-bin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619830 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619852 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619880 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619922 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash\") pod \"ovnkube-node-5z68m\" (UID: 
\"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619945 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.619986 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-binary-copy\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620026 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fea3ff9c-dadb-4168-90c0-24bc05a888e9-rootfs\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620054 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fea3ff9c-dadb-4168-90c0-24bc05a888e9-mcd-auth-proxy-config\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620079 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-socket-dir-parent\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620113 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620136 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620158 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620191 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-os-release\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620215 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-kubelet\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620266 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-hostroot\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620291 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-etc-kubernetes\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620314 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620354 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620377 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620420 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620466 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620548 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-netns\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620574 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620641 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620669 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620694 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620731 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cnibin\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620754 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cnibin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620777 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xg2b\" (UniqueName: \"kubernetes.io/projected/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-kube-api-access-6xg2b\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620825 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-k8s-cni-cncf-io\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 
03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620870 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fea3ff9c-dadb-4168-90c0-24bc05a888e9-proxy-tls\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620891 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-daemon-config\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620908 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shk7l\" (UniqueName: \"kubernetes.io/projected/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-kube-api-access-shk7l\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620924 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.620942 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621023 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-os-release\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621100 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cni-binary-copy\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621220 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621246 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621274 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg8jp\" (UniqueName: \"kubernetes.io/projected/fea3ff9c-dadb-4168-90c0-24bc05a888e9-kube-api-access-mg8jp\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621303 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-conf-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.621333 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrwrt\" (UniqueName: \"kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.622023 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.622485 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-system-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.626040 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.643291 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.653207 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.666709 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.678812 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.678894 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.678906 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.678921 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.678932 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.680258 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-64w5t" event={"ID":"9b155707-262e-482b-92c7-e097de0ff1d0","Type":"ContainerStarted","Data":"749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.680316 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-64w5t" event={"ID":"9b155707-262e-482b-92c7-e097de0ff1d0","Type":"ContainerStarted","Data":"02a82fc6f7f02f9650d7a0df6a99d79f1fe87b88fb6052ccacff10a1f0601e0b"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.682208 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.696881 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.711585 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723042 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723104 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-system-cni-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723124 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-bin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723140 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723157 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723186 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723205 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723229 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-binary-copy\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723233 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-bin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723265 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fea3ff9c-dadb-4168-90c0-24bc05a888e9-rootfs\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723291 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fea3ff9c-dadb-4168-90c0-24bc05a888e9-mcd-auth-proxy-config\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723298 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723345 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-system-cni-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723310 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-socket-dir-parent\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723400 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: 
\"kubernetes.io/host-path/fea3ff9c-dadb-4168-90c0-24bc05a888e9-rootfs\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723417 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723487 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723456 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723555 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723555 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723632 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723693 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-socket-dir-parent\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723772 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723777 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723821 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-etc-kubernetes\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723866 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723872 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-etc-kubernetes\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724068 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-binary-copy\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724255 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-os-release\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724294 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-kubelet\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724314 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-hostroot\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724318 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-os-release\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724333 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd\") pod \"ovnkube-node-5z68m\" (UID: 
\"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724354 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-kubelet\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724357 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fea3ff9c-dadb-4168-90c0-24bc05a888e9-mcd-auth-proxy-config\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.723233 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724373 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724383 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724416 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724428 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724465 4768 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724515 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-hostroot\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724624 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-netns\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724651 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724675 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724699 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724720 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724721 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-netns\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724736 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724765 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cnibin\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724787 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cnibin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724807 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xg2b\" (UniqueName: \"kubernetes.io/projected/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-kube-api-access-6xg2b\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724827 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-k8s-cni-cncf-io\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724846 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fea3ff9c-dadb-4168-90c0-24bc05a888e9-proxy-tls\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724863 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-daemon-config\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724884 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shk7l\" (UniqueName: \"kubernetes.io/projected/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-kube-api-access-shk7l\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724904 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724953 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724974 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-os-release\") pod \"multus-8d4dq\" 
(UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.724997 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cni-binary-copy\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725024 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725045 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725074 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg8jp\" (UniqueName: \"kubernetes.io/projected/fea3ff9c-dadb-4168-90c0-24bc05a888e9-kube-api-access-mg8jp\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725090 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725104 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-conf-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-conf-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725142 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrwrt\" (UniqueName: \"kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725162 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cnibin\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc 
kubenswrapper[4768]: I1203 16:18:47.725180 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725195 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cnibin\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725210 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-system-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725239 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-multus\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725258 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725263 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-multus-certs\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725293 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-multus-certs\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725395 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725572 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-system-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725631 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.725772 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-cni-dir\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726016 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726060 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-run-k8s-cni-cncf-io\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726114 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-os-release\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726144 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726185 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-host-var-lib-cni-multus\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726315 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-cni-binary-copy\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726326 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.726546 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-multus-daemon-config\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.729850 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.730122 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fea3ff9c-dadb-4168-90c0-24bc05a888e9-proxy-tls\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 
16:18:47.737092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.746366 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xg2b\" (UniqueName: \"kubernetes.io/projected/455c9e5e-20c7-4bb5-8ba2-de2f122d7038-kube-api-access-6xg2b\") pod \"multus-additional-cni-plugins-2htqq\" (UID: \"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\") " pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.752049 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg8jp\" (UniqueName: \"kubernetes.io/projected/fea3ff9c-dadb-4168-90c0-24bc05a888e9-kube-api-access-mg8jp\") pod \"machine-config-daemon-g94rv\" (UID: \"fea3ff9c-dadb-4168-90c0-24bc05a888e9\") " pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.752252 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shk7l\" (UniqueName: \"kubernetes.io/projected/d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9-kube-api-access-shk7l\") pod \"multus-8d4dq\" (UID: \"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\") " pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.752262 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrwrt\" (UniqueName: \"kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt\") pod \"ovnkube-node-5z68m\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.755059 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.770402 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.782427 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.782474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.782485 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.782504 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.782516 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.785584 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.794787 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.813635 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.829748 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.832461 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.834856 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.835840 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5c8b9d71-3aeb-428c-b61a-e984cce08f37-serviceca\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.861377 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.885468 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.885533 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.885547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.885569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.885583 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.895321 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.904144 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.915164 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2htqq" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.918338 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.925748 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.930125 4768 projected.go:288] Couldn't get configMap openshift-image-registry/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.930219 4768 projected.go:194] Error preparing data for projected volume kube-api-access-76tfc for pod openshift-image-registry/node-ca-vgj7g: failed to sync configmap cache: timed out waiting for the condition Dec 03 16:18:47 crc kubenswrapper[4768]: E1203 16:18:47.930306 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc podName:5c8b9d71-3aeb-428c-b61a-e984cce08f37 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:48.430281357 +0000 UTC m=+25.349617980 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-76tfc" (UniqueName: "kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc") pod "node-ca-vgj7g" (UID: "5c8b9d71-3aeb-428c-b61a-e984cce08f37") : failed to sync configmap cache: timed out waiting for the condition Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.935127 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8d4dq" Dec 03 16:18:47 crc kubenswrapper[4768]: W1203 16:18:47.949580 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod455c9e5e_20c7_4bb5_8ba2_de2f122d7038.slice/crio-8fb535abe9bdd45912fcb69c989bc8c6104e26f64368fe68a9728966df9324f5 WatchSource:0}: Error finding container 8fb535abe9bdd45912fcb69c989bc8c6104e26f64368fe68a9728966df9324f5: Status 404 returned error can't find the container with id 8fb535abe9bdd45912fcb69c989bc8c6104e26f64368fe68a9728966df9324f5 Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.951259 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.975152 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:47 crc kubenswrapper[4768]: W1203 16:18:47.977193 4768 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfea3ff9c_dadb_4168_90c0_24bc05a888e9.slice/crio-de420b9f88b4815afe9f1676421f7b31be3a2c0655d16ee18f991106deb74712 WatchSource:0}: Error finding container de420b9f88b4815afe9f1676421f7b31be3a2c0655d16ee18f991106deb74712: Status 404 returned error can't find the container with id de420b9f88b4815afe9f1676421f7b31be3a2c0655d16ee18f991106deb74712 Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.990800 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.990844 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.990855 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.990876 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.990886 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:47Z","lastTransitionTime":"2025-12-03T16:18:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:47 crc kubenswrapper[4768]: I1203 16:18:47.999847 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.019166 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.034428 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.054234 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.069031 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.082954 4768 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799
255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.095318 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.095393 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.095407 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.096083 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.096165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.096188 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.198716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.198747 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.198756 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.198772 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.198781 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.301502 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.301565 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.301576 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.301613 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.301628 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.368193 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.405134 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.405186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.405195 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.405215 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.405228 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.432455 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76tfc\" (UniqueName: \"kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.439499 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76tfc\" (UniqueName: \"kubernetes.io/projected/5c8b9d71-3aeb-428c-b61a-e984cce08f37-kube-api-access-76tfc\") pod \"node-ca-vgj7g\" (UID: \"5c8b9d71-3aeb-428c-b61a-e984cce08f37\") " pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.508630 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.508695 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.508708 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.508732 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.508751 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.604048 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-vgj7g" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.612465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.612518 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.612530 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.612549 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.612563 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: W1203 16:18:48.622677 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c8b9d71_3aeb_428c_b61a_e984cce08f37.slice/crio-37aefb527c018e754971706624c697fc226d3757f921cd1695f506f4e6e99496 WatchSource:0}: Error finding container 37aefb527c018e754971706624c697fc226d3757f921cd1695f506f4e6e99496: Status 404 returned error can't find the container with id 37aefb527c018e754971706624c697fc226d3757f921cd1695f506f4e6e99496 Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.687484 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838" exitCode=0 Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.687579 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.687713 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"3dabdd1faab942b3c53611e55a42e0a8a48f654bf99e4ad9077dd39de24b6584"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.691665 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.691751 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.691792 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"de420b9f88b4815afe9f1676421f7b31be3a2c0655d16ee18f991106deb74712"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.694514 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerStarted","Data":"8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.694552 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerStarted","Data":"6bb62c820d52d74755029d4ec4aa4059ba2bd89450444f8f12c447eb794c30fc"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.696364 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98" exitCode=0 Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.696433 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" 
event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.696468 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerStarted","Data":"8fb535abe9bdd45912fcb69c989bc8c6104e26f64368fe68a9728966df9324f5"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.701881 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.705220 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vgj7g" event={"ID":"5c8b9d71-3aeb-428c-b61a-e984cce08f37","Type":"ContainerStarted","Data":"37aefb527c018e754971706624c697fc226d3757f921cd1695f506f4e6e99496"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.715950 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.715979 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.715989 4768 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.716004 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.716015 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.716688 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.731410 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.746643 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.776013 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn
-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.790156 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-o
perator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 
03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.803621 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs
\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.816363 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.824647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.824704 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.824720 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.824750 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.824768 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.832014 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.845860 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.857060 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.870965 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.885546 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.898700 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.910725 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.921334 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.928040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.928088 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.928104 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.928127 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.928142 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:48Z","lastTransitionTime":"2025-12-03T16:18:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.945116 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/sec
rets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSt
atuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.961875 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.977978 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:48 crc kubenswrapper[4768]: I1203 16:18:48.995272 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:48Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.013369 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.028792 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.031551 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.031584 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.031617 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.031680 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.031694 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.040967 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.055447 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.070469 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.088069 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.106953 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc 
kubenswrapper[4768]: I1203 16:18:49.123620 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.135073 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.135130 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.135143 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.135165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.135182 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.237704 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.237786 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.237804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.237832 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.237853 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.340269 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.340321 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.340335 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.340357 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.340373 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.441453 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.441565 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.441618 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.441656 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441767 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:18:57.441718388 +0000 UTC m=+34.361054851 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441783 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441860 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441892 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441899 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441781 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.441968 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:57.441953903 +0000 UTC m=+34.361290356 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
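Unlike the webhook failures, the TearDownAt error above is a plugin-registration gap: kubevirt.io.hostpath-provisioner does not yet appear in the kubelet's list of registered CSI drivers, so the unmount is parked on a fixed backoff (durationBeforeRetry 8s) until the driver's node plugin re-registers. CSI node plugins register by exposing a socket under the kubelet's plugin-registration directory. The small Go sketch below checks for that socket; it is a hypothetical helper, and it assumes the default registration path /var/lib/kubelet/plugins_registry (adjust if this kubelet uses a non-default root directory).

// listcsiplugins.go: hypothetical triage helper. Lists entries in the
// kubelet plugin-registration directory and flags whether the driver
// named in the log has a registration socket present.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	// Assumed default path; kubelet watches this directory for
	// plugin registration sockets.
	const regDir = "/var/lib/kubelet/plugins_registry"
	const driver = "kubevirt.io.hostpath-provisioner" // driver from the log

	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Fprintf(os.Stderr, "read %s: %v\n", regDir, err)
		os.Exit(1)
	}
	found := false
	for _, e := range entries {
		fmt.Println(filepath.Join(regDir, e.Name()))
		if strings.Contains(e.Name(), driver) {
			found = true
		}
	}
	if !found {
		// Matches the condition behind "not found in the list of
		// registered CSI drivers" in the entry above.
		fmt.Printf("%s: no registration socket found\n", driver)
	}
}

Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.442026 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:57.442006474 +0000 UTC m=+34.361342907 (durationBeforeRetry 8s).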
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.442055 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:57.442042685 +0000 UTC m=+34.361379128 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.444358 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.444417 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.444655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.444684 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.444722 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.530588 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.530637 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.530696 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
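Every NodeNotReady heartbeat and "Error syncing pod" entry in this window carries the same root-cause string: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, which is expected while the network plugin (here ovnkube-node, whose containers start at 16:18:49 below) is still coming up. A minimal Go sketch of that readiness probe follows; it is illustrative only, and it assumes the conventional libcni behavior of accepting *.conf, *.conflist, and *.json files in the configured directory.

// checkcniconf.go: hypothetical triage helper mirroring the "no CNI
// configuration file" readiness test quoted in the log.
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // config dir named in the log
	var files []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		// Glob only errors on malformed patterns, which these are not.
		matches, _ := filepath.Glob(filepath.Join(dir, pat))
		files = append(files, matches...)
	}
	if len(files) == 0 {
		fmt.Printf("no CNI configuration file in %s; network plugin not ready\n", dir)
		return
	}
	for _, f := range files {
		fmt.Println(f)
	}
}

Once the network plugin writes its configuration into that directory, the runtime flips NetworkReady to true and these sync errors clear.

Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.530814 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"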
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.531180 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.531262 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.542865 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.543117 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.543148 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.543165 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:49 crc kubenswrapper[4768]: E1203 16:18:49.543245 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:18:57.543215419 +0000 UTC m=+34.462551852 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.547428 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.547478 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.547495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.547520 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.547538 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.651575 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.651676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.651699 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.651726 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.651744 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.713847 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-vgj7g" event={"ID":"5c8b9d71-3aeb-428c-b61a-e984cce08f37","Type":"ContainerStarted","Data":"e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722353 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722426 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722449 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722497 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722522 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.722568 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.727212 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb" exitCode=0 Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.727264 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.736874 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.755676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.755731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.755745 4768 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.755769 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.755781 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.757510 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.781219 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.831941 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.851715 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.859818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.859864 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.859877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.859896 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.859908 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.880151 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.899711 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.913437 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.928812 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.940272 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.958060 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.962420 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.962477 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.962488 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.962512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.962527 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:49Z","lastTransitionTime":"2025-12-03T16:18:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.974125 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.987229 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:49 crc kubenswrapper[4768]: I1203 16:18:49.999317 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:49Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.012540 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.026050 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.042086 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.054806 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.065580 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.065669 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.065687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.065716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.065736 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.068327 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.083565 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.095765 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.112290 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.131095 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d746
2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.146549 4768 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.159112 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.168549 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.168636 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.168655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.168678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.168693 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.178453 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1
f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.195203 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.212243 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.277330 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.277694 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.277855 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.278035 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.278186 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.383023 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.383478 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.383488 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.383509 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.383520 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.487166 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.487252 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.487276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.487305 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.487326 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.590831 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.590915 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.590933 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.590967 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.590990 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.694075 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.694115 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.694124 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.694139 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.694148 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.736488 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d" exitCode=0 Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.736632 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.763283 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.784271 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.797410 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.797497 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.797515 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.797539 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.797556 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.807686 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.835147 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.852469 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.882079 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.902845 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.902980 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.903004 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.903075 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.903102 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:50Z","lastTransitionTime":"2025-12-03T16:18:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.906511 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.928957 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.946465 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971
ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.968268 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:50 crc kubenswrapper[4768]: I1203 16:18:50.987891 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:50Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.004179 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.007585 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.008355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.008380 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.008444 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.008496 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.024686 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.043218 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.111652 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.111735 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.111755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.111789 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.111807 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.215646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.215710 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.215727 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.215757 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.215777 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.319489 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.319556 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.319576 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.319653 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.319677 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.423535 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.423635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.423649 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.423675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.423694 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.526921 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.527006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.527028 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.527059 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.527082 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.531485 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.531539 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.531718 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:51 crc kubenswrapper[4768]: E1203 16:18:51.531773 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:51 crc kubenswrapper[4768]: E1203 16:18:51.531974 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:51 crc kubenswrapper[4768]: E1203 16:18:51.532145 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.631062 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.631145 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.631163 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.631190 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.631222 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.734178 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.734226 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.734235 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.734252 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.734264 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.751196 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5" exitCode=0 Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.751232 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.787089 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.812674 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.829387 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.837474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.837544 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.837569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.837633 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.837659 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.845132 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.862774 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.879472 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.898315 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.918262 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.938490 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.941754 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.941831 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.941847 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.941873 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.941887 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:51Z","lastTransitionTime":"2025-12-03T16:18:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.952297 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.980874 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:51 crc kubenswrapper[4768]: I1203 16:18:51.999801 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-
kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"fin
ishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:51Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.016674 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.033159 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.045769 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.045841 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.045853 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.045875 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.045890 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.148500 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.148539 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.148548 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.148564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.148574 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.252384 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.252457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.252479 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.252512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.252535 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.356376 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.356443 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.356468 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.356498 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.356522 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.459929 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.460003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.460022 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.460051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.460071 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.563243 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.563309 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.563322 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.563344 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.563371 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.666780 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.666865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.666883 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.666909 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.666926 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.766104 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.769301 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.769353 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.769371 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.769396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.769416 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.770763 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b" exitCode=0 Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.770833 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.784250 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.803758 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"
ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.827289 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.843135 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.858960 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.872756 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.872774 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.872824 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.873018 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.873038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.873049 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.896774 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":
\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.913468 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.927632 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.945545 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.959617 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.972823 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.976607 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.976663 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.976675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.976696 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.976709 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:52Z","lastTransitionTime":"2025-12-03T16:18:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:52 crc kubenswrapper[4768]: I1203 16:18:52.987977 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:52Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.012349 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.079768 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.079849 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.079865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.079894 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.079913 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.183792 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.183928 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.183948 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.183979 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.183999 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.287878 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.287950 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.287962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.287982 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.287996 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.391263 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.391678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.391687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.391702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.391713 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.495818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.495902 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.495920 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.495946 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.495964 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.531002 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.531097 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.531160 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:53 crc kubenswrapper[4768]: E1203 16:18:53.531291 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:53 crc kubenswrapper[4768]: E1203 16:18:53.531402 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:53 crc kubenswrapper[4768]: E1203 16:18:53.531623 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.565281 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubern
etes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.595016 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.599492 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.599564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.599591 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.599659 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.599686 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.623440 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.643767 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.664788 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.684798 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.702065 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.710528 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.710646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.710669 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.710702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.710723 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.728559 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.751713 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.772788 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.789223 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.814168 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 
crc kubenswrapper[4768]: I1203 16:18:53.814250 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.814266 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.814290 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.814307 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.817282 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.837045 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.860645 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.918083 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.918153 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.918178 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.918210 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:53 crc kubenswrapper[4768]: I1203 16:18:53.918230 4768 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:53Z","lastTransitionTime":"2025-12-03T16:18:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.022435 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.022564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.022592 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.022677 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.022703 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.127094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.127168 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.127186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.127217 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.127238 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.231216 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.231262 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.231272 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.231292 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.231303 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.334221 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.334332 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.334358 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.334389 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.334410 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.437828 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.438333 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.438511 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.438766 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.438954 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.543218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.543839 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.544079 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.544288 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.544470 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.649345 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.649411 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.649443 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.649474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.649497 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.753398 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.753465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.753493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.753521 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.753541 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.788272 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.788658 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.788699 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.796456 4768 generic.go:334] "Generic (PLEG): container finished" podID="455c9e5e-20c7-4bb5-8ba2-de2f122d7038" containerID="13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981" exitCode=0 Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.796529 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerDied","Data":"13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.811948 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.811948 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.830826 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.831036 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.831342 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.854930 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.856877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.856951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.856984 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.857029 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.857057 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.880556 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.897948 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.915935 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.938284 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.955548 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.975931 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.976639 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.976665 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.976743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.976768 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:54Z","lastTransitionTime":"2025-12-03T16:18:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.983143 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg
2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:54 crc kubenswrapper[4768]: I1203 16:18:54.999704 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:54Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.016211 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.032509 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.047720 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.080122 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.080187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.080210 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.080242 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.080261 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.102439 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe3
16a9c7e185f19e076b04aa8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.123641 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.141763 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.161079 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.174081 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.183442 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.183489 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.183499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.183519 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.183534 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.187689 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.201780 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.216409 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86a
ba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.226298 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.226339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.226351 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.226374 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.226389 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.234013 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.244263 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.247622 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.247650 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.247661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.247679 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.247692 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.250706 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\
" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.266702 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.270193 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.272137 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.272176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.272188 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.272212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.272224 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.280842 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.287145 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.291688 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.291753 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.291773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.291805 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.291834 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.310055 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.315239 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.320141 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.320185 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.320197 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.320217 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.320231 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.333554 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.340732 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"5
07ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.340972 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.343752 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.343829 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.343898 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.343965 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.343989 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.347899 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.447423 4768 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.447489 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.447510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.447553 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.447573 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.530828 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.530946 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.531065 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.530836 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.531216 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:55 crc kubenswrapper[4768]: E1203 16:18:55.531355 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.551255 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.551300 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.551318 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.551341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.551360 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.655587 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.655702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.655723 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.655754 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.655774 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.758895 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.758983 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.759004 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.759036 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.759058 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.805932 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" event={"ID":"455c9e5e-20c7-4bb5-8ba2-de2f122d7038","Type":"ContainerStarted","Data":"aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.805957 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.821589 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.840765 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.859410 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.861704 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.861751 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.861766 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.861787 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.861803 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.877838 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.897037 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.916573 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.935389 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.955473 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.965354 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.965422 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.965438 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.965467 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.965488 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:55Z","lastTransitionTime":"2025-12-03T16:18:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:55 crc kubenswrapper[4768]: I1203 16:18:55.983549 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:55Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.003851 4768 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.020875 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.051710 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.068475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.068519 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.068530 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.068548 4768 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.068560 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:56Z","lastTransitionTime":"2025-12-03T16:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.072653 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-03T16:18:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.092965 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:56Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.172014 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.172084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.172098 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.172126 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.172141 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:56Z","lastTransitionTime":"2025-12-03T16:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.275457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.275510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.275522 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.275566 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.275581 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:56Z","lastTransitionTime":"2025-12-03T16:18:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:56 crc kubenswrapper[4768]: I1203 16:18:56.809496 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.530544 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.530582 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.530753 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.530842 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.531017 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.531287 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.541475 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.541761 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:19:13.541726637 +0000 UTC m=+50.461063210 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.541862 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.541963 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542033 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.542051 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542118 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:13.542090546 +0000 UTC m=+50.461427009 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542167 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542197 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542223 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:13.542213089 +0000 UTC m=+50.461549522 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542238 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542268 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.542351 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:13.542318841 +0000 UTC m=+50.461655294 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.643719 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.644078 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.644129 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.644158 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:57 crc kubenswrapper[4768]: E1203 16:18:57.644272 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:13.644241893 +0000 UTC m=+50.563578356 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.817660 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/0.log"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.823036 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a" exitCode=1
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.823111 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a"}
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.825435 4768 scope.go:117] "RemoveContainer" containerID="079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.833474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.833540 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.833564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.833630 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.833661 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:57Z","lastTransitionTime":"2025-12-03T16:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.848346 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.877949 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.904110 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.930663 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.937543 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.937825 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.937993 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.938188 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.938359 4768 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:57Z","lastTransitionTime":"2025-12-03T16:18:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.970395 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 
handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b
5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:57 crc kubenswrapper[4768]: I1203 16:18:57.997247 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb
7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:57Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.017851 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:1
8:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.042395 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.042468 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.042487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.042516 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.042535 4768 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.043299 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.066591 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.085204 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.102080 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.125185 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.146108 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.146222 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.146242 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.146273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.146293 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.149032 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.172525 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.249892 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.249954 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.249973 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.250000 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.250019 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.353715 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.353789 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.353808 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.353839 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.353860 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.457956 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.458041 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.458063 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.458101 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.458123 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.561553 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.561672 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.561706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.561745 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.561776 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.665524 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.665583 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.665636 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.665663 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.665680 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.769543 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.769623 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.769638 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.769663 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.769680 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.829647 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/0.log" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.832607 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f"} Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.832764 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.847437 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.864344 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.872491 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.872536 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc 
kubenswrapper[4768]: I1203 16:18:58.872549 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.872571 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.872584 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.880651 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.901934 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.928864 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.964738 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975179 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975194 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975219 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975237 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.975237 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:58Z","lastTransitionTime":"2025-12-03T16:18:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.981552 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:58 crc kubenswrapper[4768]: I1203 16:18:58.998919 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:58Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.029190 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:59Z is after 2025-08-24T17:21:41Z" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.046981 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.046981 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:59Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.070327 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:59Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.078251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.078339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.078359 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.078390 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.078410 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.089391 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:18:59Z is after 2025-08-24T17:21:41Z"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.181495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.181561 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.181582 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.181638 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.181665 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.288003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.288073 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.288110 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.288173 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.288213 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.391846 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.391909 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.391928 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.391955 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.391972 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.496160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.496230 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.496251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.496278 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.496295 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.531281 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.531358 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:18:59 crc kubenswrapper[4768]: E1203 16:18:59.531519 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.531663 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:18:59 crc kubenswrapper[4768]: E1203 16:18:59.531845 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.599421 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.599918 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.600064 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.600244 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.600385 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.703846 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.703924 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.703944 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.703975 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.704007 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.807738 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.807829 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.807847 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.807877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.807897 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.911549 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.911641 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.911661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.911688 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:18:59 crc kubenswrapper[4768]: I1203 16:18:59.911706 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:18:59Z","lastTransitionTime":"2025-12-03T16:18:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.016516 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.016634 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.016663 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.016704 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.016730 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.120939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.121355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.121477 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.121655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.121820 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.225870 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.226865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.227016 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.227111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.227191 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.330530 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.330579 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.330633 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.330664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.330685 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.434940 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.435003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.435021 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.435051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.435080 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.538861 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.538919 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.538936 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.538962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.538981 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.641915 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.641995 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.642013 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.642047 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.642067 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.745236 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.745293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.745312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.745341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.745363 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.844263 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/1.log" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.845453 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/0.log" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.847642 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.847701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.847721 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.847760 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.847789 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.850312 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f" exitCode=1 Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.850375 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.850436 4768 scope.go:117] "RemoveContainer" containerID="079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.851698 4768 scope.go:117] "RemoveContainer" containerID="8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f" Dec 03 16:19:00 crc kubenswrapper[4768]: E1203 16:19:00.851974 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.872321 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\
\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.905268 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af5
9628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for 
removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e639
59755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.928447 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.950477 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.951457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.951520 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.951539 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.951568 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.951588 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:00Z","lastTransitionTime":"2025-12-03T16:19:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.973404 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:00 crc kubenswrapper[4768]: I1203 16:19:00.996196 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:00Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.025055 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.042383 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.057175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.057242 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.057260 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.057289 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.057308 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.072462 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf"] Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.073246 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.073218 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.076308 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.076627 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.101432 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.125344 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.143561 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.161452 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.161514 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.161535 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.161571 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.161631 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.168062 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.181321 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9fs4\" (UniqueName: \"kubernetes.io/projected/cea00839-26de-4cb0-9639-ac6217a11fd3-kube-api-access-r9fs4\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.181415 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cea00839-26de-4cb0-9639-ac6217a11fd3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.181482 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.181522 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.186862 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.207436 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.221346 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.235797 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.254466 4768 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.266227 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.266275 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.266285 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.266306 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.266320 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.278525 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.282740 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9fs4\" (UniqueName: 
\"kubernetes.io/projected/cea00839-26de-4cb0-9639-ac6217a11fd3-kube-api-access-r9fs4\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.282986 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cea00839-26de-4cb0-9639-ac6217a11fd3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.283216 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.283387 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.284476 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.284726 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/cea00839-26de-4cb0-9639-ac6217a11fd3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.292088 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/cea00839-26de-4cb0-9639-ac6217a11fd3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.301506 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.310842 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9fs4\" (UniqueName: \"kubernetes.io/projected/cea00839-26de-4cb0-9639-ac6217a11fd3-kube-api-access-r9fs4\") pod \"ovnkube-control-plane-749d76644c-pgdpf\" (UID: \"cea00839-26de-4cb0-9639-ac6217a11fd3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.317817 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.335922 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.353496 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.369359 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.369421 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.369441 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.369470 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.369490 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.371636 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.387885 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.395653 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.414926 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"nam
e\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: W1203 16:19:01.420476 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea00839_26de_4cb0_9639_ac6217a11fd3.slice/crio-f5c3d27612ef6341c48c9b5f06246fcb683d9165e8dcbb12f2566bd25c6229a8 WatchSource:0}: Error finding container 
f5c3d27612ef6341c48c9b5f06246fcb683d9165e8dcbb12f2566bd25c6229a8: Status 404 returned error can't find the container with id f5c3d27612ef6341c48c9b5f06246fcb683d9165e8dcbb12f2566bd25c6229a8 Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.438100 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
3T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.452719 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.472493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.472547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.472564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.472622 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.472645 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.482061 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0
d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.530699 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.530748 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.530817 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.530839 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.531049 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.531184 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.576209 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.576280 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.576298 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.576328 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.576349 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.680065 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.680452 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.680472 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.680505 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.680529 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.784417 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.784515 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.784534 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.784564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.784582 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.828150 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-j25k6"] Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.828955 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.829067 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.856150 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
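Every "Failed to update status for pod" record in this stretch fails for the same reason: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 serves a certificate that expired on 2025-08-24T17:21:41Z, so the API server rejects each status PATCH before it is applied. A minimal Go sketch for confirming this from the node follows; it assumes the endpoint is reachable, and it skips chain verification deliberately so the expired leaf certificate can still be inspected.

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

// Sketch: connect to the webhook endpoint, skip chain verification (we only
// want to read the leaf certificate), and compare NotAfter with local time.
func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		panic(err)
	}
	defer conn.Close()
	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := certs[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
		cert.Subject, cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339), time.Now().After(cert.NotAfter))
}

With the node clock at 2025-12-03 and NotAfter at 2025-08-24, expired=true, matching the x509 error text repeated in the records above and below.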
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.856150 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.858629 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/1.log"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.866370 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" event={"ID":"cea00839-26de-4cb0-9639-ac6217a11fd3","Type":"ContainerStarted","Data":"9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2"}
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.866437 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" event={"ID":"cea00839-26de-4cb0-9639-ac6217a11fd3","Type":"ContainerStarted","Data":"f5c3d27612ef6341c48c9b5f06246fcb683d9165e8dcbb12f2566bd25c6229a8"}
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.876317 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.888152 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.888225 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.888247 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.888276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.888296 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.893260 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjblt\" (UniqueName: \"kubernetes.io/projected/772886ae-dcfc-418e-ac82-49d7844c99f1-kube-api-access-tjblt\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.893385 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6"
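The kubelet keeps reporting NetworkReady=false because no CNI config exists yet; the network plugin (multus/ovn-kubernetes here) normally writes one into /etc/kubernetes/cni/net.d/ once it is running. An illustrative Go check of that directory follows; the path comes from the log itself, while the accepted file extensions are an assumption based on common CNI loader conventions.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Illustrative only: report whether any CNI network config is present,
// roughly what "NetworkReady=false ... no CNI configuration file" implies.
func main() {
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read", dir, "-", err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions commonly accepted by CNI loaders
			fmt.Println("found CNI config:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file in", dir)
	}
}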
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.919061 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.943116 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.964234 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:01 crc 
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.983934 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:01Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.991650 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.991706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.991724 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.991750 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.991771 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:01Z","lastTransitionTime":"2025-12-03T16:19:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.994622 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6"
Dec 03 16:19:01 crc kubenswrapper[4768]: I1203 16:19:01.994740 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjblt\" (UniqueName: \"kubernetes.io/projected/772886ae-dcfc-418e-ac82-49d7844c99f1-kube-api-access-tjblt\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6"
Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.994863 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 03 16:19:01 crc kubenswrapper[4768]: E1203 16:19:01.994976 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:02.494944892 +0000 UTC m=+39.414281525 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered
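The nestedpendingoperations record above shows the kubelet's per-volume retry discipline: after a failed MountVolume.SetUp it forbids retries for a delay (500ms here) that grows on subsequent failures. A hedged sketch of that capped exponential backoff pattern follows; the constants and helper name are illustrative, not the kubelet's actual tuning.

package main

import (
	"errors"
	"fmt"
	"time"
)

// Illustrative sketch of the retry pattern implied by "durationBeforeRetry
// 500ms": each failed attempt roughly doubles the wait, up to a cap.
func retryWithBackoff(op func() error, initial, max time.Duration, attempts int) error {
	delay := initial
	for i := 0; i < attempts; i++ {
		if err := op(); err == nil {
			return nil
		}
		fmt.Printf("attempt %d failed; no retries permitted for %s\n", i+1, delay)
		time.Sleep(delay)
		if delay *= 2; delay > max {
			delay = max
		}
	}
	return errors.New("giving up")
}

func main() {
	_ = retryWithBackoff(func() error {
		// Stand-in for the mount failure seen in the log.
		return errors.New(`object "openshift-multus"/"metrics-daemon-secret" not registered`)
	}, 500*time.Millisecond, 2*time.Minute, 3)
}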
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.028970 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjblt\" (UniqueName: \"kubernetes.io/projected/772886ae-dcfc-418e-ac82-49d7844c99f1-kube-api-access-tjblt\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6"
pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.037476 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.055676 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.092209 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 
handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.094626 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.094665 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.094677 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.094697 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.094711 4768 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.109741 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.124522 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0
a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.137558 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.151474 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.168182 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.197937 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.197993 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.198006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.198033 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.198051 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.300727 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.300778 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.300788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.300806 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.300819 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.404160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.404209 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.404220 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.404240 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.404251 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.500871 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:02 crc kubenswrapper[4768]: E1203 16:19:02.501082 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:02 crc kubenswrapper[4768]: E1203 16:19:02.501172 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:03.501145049 +0000 UTC m=+40.420481502 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.508297 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.508357 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.508375 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.508405 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.508424 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.612452 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.612512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.612532 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.612563 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.612583 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.715849 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.715918 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.715936 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.715968 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.715988 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.820158 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.820212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.820228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.820248 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.820262 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.874941 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" event={"ID":"cea00839-26de-4cb0-9639-ac6217a11fd3","Type":"ContainerStarted","Data":"d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.899353 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.919272 4768 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2f
ce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.923969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.924038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.924057 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.924085 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.924108 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:02Z","lastTransitionTime":"2025-12-03T16:19:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.936362 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.966383 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 
handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:02 crc kubenswrapper[4768]: I1203 16:19:02.986044 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:02Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.005803 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.023678 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.027296 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.027343 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 
16:19:03.027362 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.027419 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.027438 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.042574 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.066352 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.088659 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.104752 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.122896 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:
01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.130332 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.130408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.130429 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.130465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.130488 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.140414 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.162374 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.181558 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.206586 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.234419 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.234485 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc 
kubenswrapper[4768]: I1203 16:19:03.234497 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.234517 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.234529 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.337511 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.337592 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.337664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.337700 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.337724 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.441258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.441343 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.441362 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.441397 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.441415 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.519305 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.519516 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.519645 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:05.519579157 +0000 UTC m=+42.438915620 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.531341 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.531386 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.531423 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.531468 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.531680 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.531819 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.531981 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:03 crc kubenswrapper[4768]: E1203 16:19:03.532098 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.544055 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.544103 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.544123 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.544148 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.544181 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.551414 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.567757 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.593281 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af5
9628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://079a88112f25affb9427e0b45bdc79bed80aefe316a9c7e185f19e076b04aa8a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:57Z\\\",\\\"message\\\":\\\"1203 16:18:57.533590 6049 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.533961 6049 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534033 6049 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534264 6049 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.534459 6049 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535067 6049 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:57.535120 6049 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:57.535144 6049 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:57.535171 6049 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:57.535214 6049 factory.go:656] Stopping watch factory\\\\nI1203 16:18:57.535254 6049 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:57.535260 6049 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:57.535285 6049 handler.go:208] Removed *v1.Node ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for 
removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e639
59755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.612714 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.639073 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.647439 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.647516 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.647539 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.647570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.647588 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.656461 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.671451 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.690060 4768 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.708523 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.725442 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.743884 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.750385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.750872 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.751184 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.751469 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.751755 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.769315 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
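Note: every "Failed to update status for pod" entry above shares one root cause, spelled out at the end of each error: the network-node-identity webhook serving on 127.0.0.1:9743 presents a certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2025-12-03. A minimal check from the node, as a sketch: it assumes the webhook is still listening on that address and that the third-party cryptography package is available.

import ssl
from datetime import datetime, timezone
from cryptography import x509

# Fetch the serving certificate without verification, so an expired
# cert is still retrievable, then compare notAfter to the current time.
pem = ssl.get_server_certificate(("127.0.0.1", 9743))
cert = x509.load_pem_x509_certificate(pem.encode())
# not_valid_after is naive UTC in older cryptography releases; newer
# versions also expose not_valid_after_utc.
not_after = cert.not_valid_after.replace(tzinfo=timezone.utc)
now = datetime.now(timezone.utc)
print(f"notAfter={not_after.isoformat()} now={now.isoformat()} expired={now > not_after}")

Until that certificate is rotated, every webhook-gated API write from this kubelet (pod status patches, node status patches) will keep failing exactly as logged below.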
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.791206 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.809009 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.833245 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.856386 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
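Note: each failed patch embeds the JSON merge patch as a doubly Go-quoted string (JSON keys appear as \\\"key\\\"). A sketch to recover the patch from one raw journald line; the regex and the two unescape passes assume exactly the quoting style shown in this dump.

import codecs
import json
import re

def decode_patch(line: str) -> dict:
    # The payload sits between 'failed to patch status \"' and '\" for pod'.
    m = re.search(r'failed to patch status \\"(.+?)\\" for pod', line)
    if not m:
        raise ValueError("no status patch in line")
    payload = m.group(1)
    # Two levels of Go %q quoting are visible here, so unescape twice
    # before parsing; unicode_escape is safe on this plain-ASCII JSON.
    for _ in range(2):
        payload = codecs.decode(payload, "unicode_escape")
    return json.loads(payload)

With the patch decoded, the $setElementOrder/conditions directives and containerStatuses arrays in these entries become ordinary dicts and lists.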
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:03Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.856470 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.857099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.857230 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.857371 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.857517 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.961393 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.961460 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.961480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.961511 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:03 crc kubenswrapper[4768]: I1203 16:19:03.961534 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:03Z","lastTransitionTime":"2025-12-03T16:19:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.065536 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.065629 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.065644 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.065673 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.065693 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.168900 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.168963 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.168981 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.169011 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.169030 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.272661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.272731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.272755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.272800 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.272826 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
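Note: every kubenswrapper line in this file follows klog's header format: severity letter plus MMDD, wall-clock time, PID, source file:line, then the message. A small parser for that header, sketched against the lines above (field widths are klog's defaults, taken as given here):

import re

KLOG = re.compile(
    r'(?P<sev>[IWEF])(?P<month>\d{2})(?P<day>\d{2}) '  # I1203 -> Info, Dec 03
    r'(?P<time>\d{2}:\d{2}:\d{2}\.\d{6}) +'            # 16:19:04.065536
    r'(?P<pid>\d+) '                                   # 4768
    r'(?P<src>[\w.-]+:\d+)\] '                         # kubelet_node_status.go:724
    r'(?P<msg>.*)'
)

line = 'I1203 16:19:03.750385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"'
print(KLOG.match(line).groupdict())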
Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.375885 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.375943 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.375963 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.375987 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.376008 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.480148 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.480216 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.480231 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.480262 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.480280 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.583567 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.583673 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.583695 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.583725 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.583749 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.687174 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.687238 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.687258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.687287 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.687307 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.791241 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.791319 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.791338 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.791377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.791409 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.895063 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.895141 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.895162 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.895193 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.895213 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.999402 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.999466 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:04 crc kubenswrapper[4768]: I1203 16:19:04.999495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:04.999532 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:04.999558 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:04Z","lastTransitionTime":"2025-12-03T16:19:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.102760 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.102817 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.102835 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.102862 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.102881 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.206288 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.206377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.206396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.206427 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.206449 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
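Note: while the node stays NotReady, the NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady cycle repeats roughly every 100 ms, which makes this stretch of the log hard to scan. A sketch that collapses the repetition into per-event counts and a time window; the file path is assumed, and findall per physical line tolerates the wrapped multi-entry lines in this dump.

import re
from collections import Counter

pat = re.compile(
    r'(\d{2}:\d{2}:\d{2}\.\d{6}) \d+ kubelet_node_status\.go:\d+\] '
    r'"Recording event message for node" node="crc" event="(\w+)"'
)
events = Counter()
times = []
with open("kubelet.log") as f:  # path assumed
    for line in f:
        for ts, ev in pat.findall(line):
            events[ev] += 1
            times.append(ts)
if times:
    print(f"{sum(events.values())} node events between {min(times)} and {max(times)}")
    for ev, n in events.most_common():
        print(f"  {ev}: {n}")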
Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.310214 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.310321 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.310350 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.310389 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.310411 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.414798 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.414869 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.414890 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.414921 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.414942 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.519116 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.519502 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.519684 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.519840 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.519974 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.530900 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.530949 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.531014 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.531124 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.531116 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.531328 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.531500 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.531692 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
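Note: the recurring KubeletNotReady condition points at an empty /etc/kubernetes/cni/net.d/. A quick existence check, as a sketch; the .conf/.conflist/.json extensions are the standard ones CNI runtimes scan for, stated here as an assumption rather than read from this log.

import os

cni_dir = "/etc/kubernetes/cni/net.d"  # directory named in the kubelet message
try:
    confs = [f for f in os.listdir(cni_dir)
             if f.endswith((".conf", ".conflist", ".json"))]
except FileNotFoundError:
    confs = []
print(f"{cni_dir}: {confs or 'no CNI configuration files'}")

An empty result matches the NetworkPluginNotReady condition above: the network provider (OVN-Kubernetes in this cluster) has not written its configuration yet, which is consistent with its own pods being blocked by the expired webhook certificate.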
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.545498 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.545778 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.545890 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:09.54585695 +0000 UTC m=+46.465193413 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.623421 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.623493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.623513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.623542 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.623567 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
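Note: the metrics-certs mount failure just above is queued for retry about four seconds out ("durationBeforeRetry 4s"). kubelet's volume operations retry with exponential backoff between attempts; the sketch below only reproduces that shape, and the initial delay, factor, and cap are illustrative assumptions, not kubelet's actual constants.

# Sketch of exponential backoff as suggested by "durationBeforeRetry 4s".
def backoff_schedule(initial=0.5, factor=2.0, cap=2 * 60 * 60, attempts=8):
    delay = initial
    for i in range(attempts):
        yield i + 1, min(delay, cap)
        delay *= factor

for attempt, delay in backoff_schedule():
    print(f"attempt {attempt}: wait {delay:g}s")
# Under these assumed constants the 4th attempt waits 4s, matching the
# retry window logged for the metrics-certs volume.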
Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.658291 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.658420 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.658481 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.658514 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.658567 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.683847 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:05Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.689709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.689781 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.689799 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.689856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.689876 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.711167 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:05Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.716777 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.717026 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.717079 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.717119 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.717146 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.741820 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:05Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.747709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.747776 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.747796 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.747824 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.747842 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.772309 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:05Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.778016 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.778073 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.778091 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.778121 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.778142 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.800795 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:05Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:05 crc kubenswrapper[4768]: E1203 16:19:05.801028 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.804037 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
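[editor's note -- illustrative sketch, not part of the captured log: every patch attempt above failed because the certificate served by the node-identity webhook endpoint (127.0.0.1:9743, taken from the error messages) expired on 2025-08-24T17:21:41Z. A minimal Python check of that condition might look like the following; the endpoint and the third-party "cryptography" package are assumptions.]
    # Fetch the webhook's TLS certificate and test whether it has expired.
    import datetime
    import ssl
    from cryptography import x509  # assumed installed: pip install cryptography

    def cert_is_expired(host: str, port: int) -> bool:
        # ssl.get_server_certificate() does not verify the chain, so it
        # still returns the PEM even though the certificate is expired.
        pem = ssl.get_server_certificate((host, port))
        cert = x509.load_pem_x509_certificate(pem.encode("ascii"))
        # not_valid_after is a naive UTC datetime; compare against UTC now.
        return datetime.datetime.utcnow() > cert.not_valid_after

    if __name__ == "__main__":
        print(cert_is_expired("127.0.0.1", 9743))  # endpoint from the log above
[editor's note: on this node the sketch would print True, matching the x509 "certificate has expired" failure; rotating the expired node-identity webhook certificate is the remediation implied by the error.]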
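[editor's note -- illustrative sketch, not part of the captured log: the NodeNotReady records below all carry the same cause, "no CNI configuration file in /etc/kubernetes/cni/net.d/". A minimal Python probe for that condition on the node is sketched here; the file-extension filter is an assumption about how CNI config files are usually named.]
    # Report whether any CNI network config exists in the directory named
    # by the kubelet's NotReady message.
    import os

    CNI_CONF_DIR = "/etc/kubernetes/cni/net.d"  # path from the log message

    def cni_config_present(conf_dir: str = CNI_CONF_DIR) -> bool:
        try:
            entries = os.listdir(conf_dir)
        except FileNotFoundError:
            # Directory missing entirely: the network plugin never wrote it.
            return False
        # CNI plugins drop .conf/.conflist/.json files here once they start.
        return any(e.endswith((".conf", ".conflist", ".json")) for e in entries)

    if __name__ == "__main__":
        print(cni_config_present())
[editor's note: a False result matches the NetworkPluginNotReady condition the kubelet keeps reporting until the network provider (OVN-Kubernetes, judging by the network-node-identity webhook above) writes its configuration.]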
event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.804118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.804139 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.804173 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.804198 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.907962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.908026 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.908043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.908068 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:05 crc kubenswrapper[4768]: I1203 16:19:05.908087 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:05Z","lastTransitionTime":"2025-12-03T16:19:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.013014 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.013081 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.013100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.013129 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.013152 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.116746 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.116833 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.116856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.116891 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.116916 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.220874 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.220931 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.220941 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.220962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.220974 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.324178 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.324251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.324272 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.324299 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.324318 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.427409 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.427487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.427513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.427549 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.427569 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.530885 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.530959 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.530979 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.531007 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.531028 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.634880 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.634961 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.634980 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.635013 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.635033 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.738779 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.738857 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.738877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.738908 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.738926 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.842570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.842682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.842701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.842732 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.842755 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.946867 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.946957 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.946978 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.947026 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:06 crc kubenswrapper[4768]: I1203 16:19:06.947058 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:06Z","lastTransitionTime":"2025-12-03T16:19:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.050664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.050740 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.050757 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.050781 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.050800 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.154045 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.154111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.154133 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.154161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.154180 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.257588 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.257677 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.257695 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.257726 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.257746 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.361302 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.361378 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.361398 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.361429 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.361451 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.465207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.465283 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.465302 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.465330 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.465351 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.531653 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.531699 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.531811 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6"
Dec 03 16:19:07 crc kubenswrapper[4768]: E1203 16:19:07.531930 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.531955 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:19:07 crc kubenswrapper[4768]: E1203 16:19:07.532197 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1"
Dec 03 16:19:07 crc kubenswrapper[4768]: E1203 16:19:07.532359 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:19:07 crc kubenswrapper[4768]: E1203 16:19:07.532456 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.568664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.568732 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.568755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.568789 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.568810 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.672338 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.672399 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.672420 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.672451 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.672472 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.775337 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.775407 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.775426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.775459 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.775477 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.879367 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.879433 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.879453 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.879525 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.879547 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.983144 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.983247 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.983274 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.983311 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:07 crc kubenswrapper[4768]: I1203 16:19:07.983340 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:07Z","lastTransitionTime":"2025-12-03T16:19:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.086846 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.086929 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.086946 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.086976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.087000 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.190212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.190282 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.190298 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.190330 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.190351 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.293982 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.294098 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.294126 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.294167 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.294193 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.397855 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.397937 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.397955 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.397986 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.398005 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.500569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.500676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.500696 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.500728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.500749 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.604705 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.604787 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.604809 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.604848 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.604872 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.708667 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.708735 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.708760 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.708794 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.708823 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.812099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.812163 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.812182 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.812212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.812233 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.916128 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.916187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.916206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.916228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:08 crc kubenswrapper[4768]: I1203 16:19:08.916247 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:08Z","lastTransitionTime":"2025-12-03T16:19:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.020081 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.020137 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.020155 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.020177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.020194 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.123940 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.124030 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.124051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.124084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.124106 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.226974 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.227045 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.227065 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.227096 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.227125 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.331410 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.331476 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.331504 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.331529 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.331548 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.435773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.435868 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.435897 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.435939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.435969 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.531491 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.531516 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.531580 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.531723 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.531940 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.531966 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.531713 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.532110 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.539410 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.539477 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.539494 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.539523 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.539552 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.594961 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.595189 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:09 crc kubenswrapper[4768]: E1203 16:19:09.595278 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:17.595253361 +0000 UTC m=+54.514589824 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.644951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.645037 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.645056 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.645088 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.645110 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.749254 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.749334 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.749355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.749385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.749403 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
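
The "not registered" in the mount failure above is different in kind from the CNI errors, and it is not the same as "not found": it appears to come from the kubelet's secret manager, whose local cache has not yet registered a watch for that Secret (typical while the node is still coming up), so the mount is parked with an 8s backoff rather than failed permanently. A minimal sketch for checking whether the Secret actually exists on the API server, assuming the kubernetes Python client and a working kubeconfig; only the namespace and name come from the log:

from kubernetes import client, config
from kubernetes.client.rest import ApiException

def secret_exists(namespace: str, name: str) -> bool:
    # Assumes a kubeconfig with read access to the namespace.
    config.load_kube_config()
    v1 = client.CoreV1Api()
    try:
        v1.read_namespaced_secret(name, namespace)
        return True
    except ApiException as exc:
        if exc.status == 404:
            # The Secret is truly absent server-side, not just missing
            # from the kubelet's local cache.
            return False
        raise

if __name__ == "__main__":
    # Namespace and name taken from the secret.go error above.
    print(secret_exists("openshift-multus", "metrics-daemon-secret"))

If the Secret exists server-side, the mount usually succeeds on a later retry once the kubelet's watches are established; a genuine 404 would point at the multus operator not having created it yet.
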
Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.853799 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.853909 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.853934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.853970 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.853995 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.958386 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.958503 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.958525 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.958553 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:09 crc kubenswrapper[4768]: I1203 16:19:09.958571 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:09Z","lastTransitionTime":"2025-12-03T16:19:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.063105 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.063197 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.063222 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.063265 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.063290 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.166636 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.166709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.166728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.166759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.166780 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.270835 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.270939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.270959 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.270987 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.271010 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.374829 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.374951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.374975 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.375013 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.375038 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.479421 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.479526 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.479548 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.479580 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.479634 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.584038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.584115 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.584135 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.584171 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.584190 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.688180 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.688256 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.688273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.688302 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.688321 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.791893 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.791962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.791979 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.792007 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.792026 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.895958 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.896055 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.896081 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.896117 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:10 crc kubenswrapper[4768]: I1203 16:19:10.896142 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:10Z","lastTransitionTime":"2025-12-03T16:19:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.000249 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.000341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.000360 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.000389 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.000409 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.104767 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.104832 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.104847 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.104873 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.104888 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.207935 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.208016 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.208037 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.208541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.208584 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.312988 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.313067 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.313089 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.313123 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.313143 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.418154 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.418273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.418295 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.418328 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.418349 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.521830 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.521922 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.521941 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.521972 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.521994 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.531491 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.531519 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.531491 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6"
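All of the "No sandbox for pod" pods above are stuck behind the same gate: the runtime reports NetworkReady=false because nothing has written a CNI config into /etc/kubernetes/cni/net.d/ yet (on this cluster that is ovnkube-node's job, and its controller is crash-looping, as the entries further below show). A minimal triage sketch in Go, not kubelet code, that checks the directory named in the message:

```go
// cnicheck.go - a triage sketch (not kubelet code): list the directory named
// in the log message and report whether any CNI config file exists, which is
// what the NetworkReady gate is waiting for on this node.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path taken from the kubelet message
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", dir, err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions the CNI loader accepts
			fmt.Printf("found CNI config: %s\n", filepath.Join(dir, e.Name()))
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file present; network plugin not ready")
	}
}
```

On this node the file normally appears once the ovnkube-controller container stops crash-looping; a check like this merely confirms which side is at fault.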
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:11 crc kubenswrapper[4768]: E1203 16:19:11.531872 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:11 crc kubenswrapper[4768]: E1203 16:19:11.531983 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.531975 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:11 crc kubenswrapper[4768]: E1203 16:19:11.532096 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.626258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.626329 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.626348 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.626377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.626397 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.730276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.730365 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.730390 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.730431 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.730461 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.834300 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.834378 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.834395 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.834426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.834444 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.937754 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.937838 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.937856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.937887 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:11 crc kubenswrapper[4768]: I1203 16:19:11.937932 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:11Z","lastTransitionTime":"2025-12-03T16:19:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.041344 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.041428 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.041447 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.041480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.041500 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.144675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.144723 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.144733 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.144751 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.144762 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.247687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.247740 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.247757 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.247783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.247803 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.351510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.352117 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.352293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.352460 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.352593 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.363312 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.364716 4768 scope.go:117] "RemoveContainer" containerID="8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.387264 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af5
9628aeaf7324d17dfb56ba4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z"
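This completes the first of a series of identical patch failures: the status update itself is well-formed, but the API server cannot call the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743 because its serving certificate expired on 2025-08-24, long before the node's current clock of 2025-12-03. An illustrative Go sketch of the same validity check that fails here (the PEM path is hypothetical; point it at the webhook's serving certificate):

```go
// certcheck.go - illustrative sketch: perform the validity check that fails
// in the log ("x509: certificate has expired or is not yet valid").
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem") // hypothetical path
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	now := time.Now()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate not yet valid until %s\n", cert.NotBefore)
	case now.After(cert.NotAfter):
		// This is the branch hit in the log: current time is after NotAfter.
		fmt.Printf("certificate expired at %s\n", cert.NotAfter)
	default:
		fmt.Printf("certificate valid until %s\n", cert.NotAfter)
	}
}
```

The same expired certificate explains every "Failed to update status for pod" entry that follows; nothing about the individual pods is wrong.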
Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.409203 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.422961 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.435252 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.453748 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.456001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.456069 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.456088 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.456118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.456137 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.469313 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.490286 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.507960 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.526622 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.544510 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.559397 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.559464 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.559484 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.559519 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.559540 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.565778 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.581230 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.595023 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.620716 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.645943 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.662652 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.662722 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.662736 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.662761 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.662780 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.670959 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.765793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.765881 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.765902 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.765934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.765953 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.871487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.871564 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.871581 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.871637 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.871674 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.922693 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/1.log" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.927448 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1"} Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.928168 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.947953 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f
8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.969664 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:12Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.975672 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.975752 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.975774 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.975799 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:12 crc kubenswrapper[4768]: I1203 16:19:12.975815 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:12Z","lastTransitionTime":"2025-12-03T16:19:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.009366 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.034970 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.056258 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.070623 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.078859 4768 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.078921 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.078944 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.078975 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.078991 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.087415 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.105818 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.118551 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.130767 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.143986 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.158517 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.173231 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.182187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.182237 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.182248 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.182268 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.182279 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.188518 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.206142 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.219691 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.284546 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.284754 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.284769 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.284790 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.284803 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.388465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.388523 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.388537 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.388558 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.388573 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.492288 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.492331 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.492344 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.492364 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.492377 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.531145 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.531196 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.531327 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.531428 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.531424 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.531639 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.531788 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
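The recurring NodeNotReady condition above reduces to a single check: the container runtime reports NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. Below is a minimal standalone sketch of that check; only the directory name and the message wording are taken from the log, the rest is illustrative and is not kubelet's actual implementation.

```go
// cnicheck.go - standalone sketch of the readiness condition implied by
// "no CNI configuration file in /etc/kubernetes/cni/net.d/".
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the NetworkPluginNotReady message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: cannot read %s: %v\n", confDir, err)
		return
	}
	for _, e := range entries {
		// CNI loaders accept .conf, .conflist and .json network configurations.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: NetworkPluginNotReady (no CNI configuration file)")
}
```

Until ovnkube (or another network provider) writes its config into that directory, every pod requiring a sandbox network keeps hitting the "Error syncing pod, skipping" path seen above.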
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.531879 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.543237 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.543395 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543520 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:19:45.543475099 +0000 UTC m=+82.462811562 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543581 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543659 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543681 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.543723 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543766 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:45.543736355 +0000 UTC m=+82.463072818 (durationBeforeRetry 32s). 
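The Unmounter.TearDownAt failure above means the unmount reached the CSI layer before the kubevirt.io.hostpath-provisioner node plugin re-registered with kubelet after the restart. Kubelet discovers node plugins through registration sockets; here is a hedged sketch that lists them and looks for the driver named in the error. The registration directory is the conventional path and should be treated as an assumption.

```go
// csicheck.go - sketch of why a driver can be "not found in the list of
// registered CSI drivers": kubelet only knows about plugins that have
// placed a registration socket in its plugin registry.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	const regDir = "/var/lib/kubelet/plugins_registry" // conventional registration dir (assumption)
	entries, err := os.ReadDir(regDir)
	if err != nil {
		panic(err)
	}
	want := "kubevirt.io.hostpath-provisioner" // driver name from the TearDown error
	found := false
	for _, e := range entries {
		fmt.Println("registered plugin socket:", e.Name())
		if strings.Contains(e.Name(), want) {
			found = true
		}
	}
	if !found {
		fmt.Printf("%s not found in the list of registered CSI drivers\n", want)
	}
}
```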
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.543805 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543876 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543959 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.543978 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:45.54394456 +0000 UTC m=+82.463281213 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.544061 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:45.544045512 +0000 UTC m=+82.463382145 (durationBeforeRetry 32s). 
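The projected-volume failures above all involve the same two objects: a kube-api-access-* volume projects the service account token together with the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps, and the kubelet's local object cache has not (re)registered either ConfigMap yet. A hedged client-go sketch to confirm from outside the node that the two ConfigMaps actually exist on the API server (the kubeconfig path is an assumption):

```go
// checkcm.go - verify the ConfigMaps named in the "not registered" errors
// exist server-side, which distinguishes a kubelet cache problem from a
// genuinely missing object.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config") // assumed path
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	ns := "openshift-network-diagnostics" // namespace from the log lines
	for _, name := range []string{"kube-root-ca.crt", "openshift-service-ca.crt"} {
		_, err := cs.CoreV1().ConfigMaps(ns).Get(context.TODO(), name, metav1.GetOptions{})
		fmt.Printf("configmap %s/%s: err=%v\n", ns, name, err)
	}
}
```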
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.552552 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.572624 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.586291 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
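Every failed status patch in this section shares one root cause: the network-node-identity webhook's serving certificate (mounted at /etc/webhook-cert/ per the volumeMounts above) has a NotAfter of 2025-08-24T17:21:41Z, so every Post to https://127.0.0.1:9743/pod fails TLS verification at the current time of 2025-12-03. A minimal sketch of the validity check that produces the "x509: certificate has expired or is not yet valid" wording; the file name under the mount is assumed.

```go
// certcheck.go - minimal sketch of the x509 validity-window check that the
// TLS handshake in this log keeps failing.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt") // mount dir from the log; file name assumed
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// The condition hit throughout this log:
		// current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z.
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Until that certificate is rotated, the status manager's patches are rejected by the pod.network-node-identity.openshift.io webhook regardless of the pod being patched.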
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596182 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596220 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596243 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596263 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.596313 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.599007 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.610521 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.614444 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.639814 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.644850 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.645169 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.645224 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.645249 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.645346 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:45.645315868 +0000 UTC m=+82.564652331 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.659428 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.678868 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.695197 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.699294 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.699343 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.699355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.699374 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.699388 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.717369 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.737169 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.754547 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.782116 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.797372 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.802665 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.802767 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.802788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.802815 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.802832 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.811276 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.824485 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.839487 4768 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.864804 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.888671 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.906351 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.906426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.906444 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.906474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.906493 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:13Z","lastTransitionTime":"2025-12-03T16:19:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.912051 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd7
89a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.936161 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/2.log" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.936148 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.937275 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/1.log" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.942336 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" exitCode=1 Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.942397 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1"} Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.942504 4768 scope.go:117] "RemoveContainer" containerID="8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.944850 4768 scope.go:117] "RemoveContainer" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" Dec 03 16:19:13 crc kubenswrapper[4768]: E1203 16:19:13.945157 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.972809 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/s
erviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 
16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\
\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:13 crc kubenswrapper[4768]: I1203 16:19:13.998011 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:13Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.010224 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.010302 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.010320 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.010349 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.010369 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.017147 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.033917 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.052570 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.086892 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.109963 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.113404 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.113462 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.113475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.113499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.113514 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.129729 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.145250 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.165561 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.182938 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.201184 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.217010 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.217063 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.217082 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.217113 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.217128 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.224541 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.239880 4768 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.257997 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,
\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.276938 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.292016 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.313196 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.321228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.321301 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.321315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.321341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.321363 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.331240 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.345381 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.366814 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8f42cf1bd285df32cd333de6421ebc8a46500af59628aeaf7324d17dfb56ba4f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:18:59Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:18:59.115073 6178 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:18:59.115101 6178 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:18:59.115125 6178 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1203 16:18:59.115129 6178 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:18:59.115146 6178 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:18:59.115151 6178 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:18:59.115161 6178 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:18:59.115184 6178 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:18:59.115185 6178 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1203 16:18:59.115212 6178 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:18:59.115233 6178 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:18:59.115308 6178 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:18:59.115314 6178 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1203 16:18:59.115370 6178 factory.go:656] Stopping watch factory\\\\nI1203 16:18:59.115398 6178 ovnkube.go:599] Stopped ovnkube\\\\nI1203 16:18:5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:58Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone 
local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.383118 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.395703 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.411855 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.426091 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.426160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.426179 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.426207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.426228 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.429714 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.448018 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.466225 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.482637 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.500160 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.529439 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.529503 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.529514 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.529533 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.529545 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.633123 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.633175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.633184 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.633205 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.633218 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.736843 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.736895 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.736904 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.736932 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.736945 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.839113 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.839154 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.839163 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.839184 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.839203 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.942913 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.943019 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.943040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.943155 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.943178 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:14Z","lastTransitionTime":"2025-12-03T16:19:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.948923 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/2.log" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.955255 4768 scope.go:117] "RemoveContainer" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" Dec 03 16:19:14 crc kubenswrapper[4768]: E1203 16:19:14.955532 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.973285 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:14 crc kubenswrapper[4768]: I1203 16:19:14.991044 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:14Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.013528 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.034384 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.046565 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.046639 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.046651 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.046675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.046689 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.054190 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.074789 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.095400 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.113926 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.132303 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150093 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150108 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150129 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150145 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.150456 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.176113 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.198867 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.215636 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.228094 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.253381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.253730 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.253801 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.253871 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.253934 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.255168 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 
16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.278975 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.298770 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:15Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.357768 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.357817 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.357834 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.357861 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.357882 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.461103 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.461173 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.461190 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.461221 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.461239 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.531106 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.531106 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:15 crc kubenswrapper[4768]: E1203 16:19:15.532012 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.531281 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:15 crc kubenswrapper[4768]: E1203 16:19:15.532153 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:15 crc kubenswrapper[4768]: E1203 16:19:15.532184 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.531180 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:15 crc kubenswrapper[4768]: E1203 16:19:15.532779 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.565216 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.565290 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.565310 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.565338 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.565358 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.668651 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.668739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.668765 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.668795 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.668816 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.771853 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.771919 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.771937 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.771968 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.771990 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.875210 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.875275 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.875292 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.875319 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.875337 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.978553 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.978635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.978653 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.978678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:15 crc kubenswrapper[4768]: I1203 16:19:15.978701 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:15Z","lastTransitionTime":"2025-12-03T16:19:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.064177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.064253 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.064273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.064301 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.064323 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.086259 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.091568 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.091662 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.091683 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.091711 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.091731 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.110922 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.116635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.116697 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.116713 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.116737 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.116752 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.137817 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.144081 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.144154 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.144171 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.144199 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.144218 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.164822 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.169740 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.169790 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.169804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.169823 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.169835 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.187024 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:16Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:16 crc kubenswrapper[4768]: E1203 16:19:16.187204 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.189422 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.189451 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.189462 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.189480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.189493 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.292312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.292373 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.292385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.292406 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.292419 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.395176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.395211 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.395221 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.395238 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.395252 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.498586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.498672 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.498689 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.498716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.498736 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.601655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.601709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.601728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.601753 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.601773 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.704836 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.704901 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.704940 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.704980 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.705003 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.808206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.808278 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.808302 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.808332 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.808355 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.911551 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.911632 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.911652 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.911682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:16 crc kubenswrapper[4768]: I1203 16:19:16.911704 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:16Z","lastTransitionTime":"2025-12-03T16:19:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.014822 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.014899 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.014913 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.014936 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.014952 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.118397 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.118472 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.118484 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.118512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.118526 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.221875 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.221947 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.221971 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.222001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.222021 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.325026 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.325107 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.325129 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.325163 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.325188 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.427924 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.427962 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.427976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.427995 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.428008 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.530806 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.530888 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531010 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531013 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531092 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531184 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.531169 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.531294 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.531296 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.531491 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.531699 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.633979 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.634361 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.634477 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.634647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.634765 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.692648 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.692816 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:17 crc kubenswrapper[4768]: E1203 16:19:17.692907 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:19:33.692879426 +0000 UTC m=+70.612215889 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.738529 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.738630 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.738649 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.738675 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.738693 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.842440 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.842487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.842522 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.842541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.842551 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.945710 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.945786 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.945811 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.946011 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:17 crc kubenswrapper[4768]: I1203 16:19:17.946097 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:17Z","lastTransitionTime":"2025-12-03T16:19:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.048687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.048741 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.048757 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.048776 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.048791 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.152534 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.152586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.152612 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.152636 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.152650 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.256159 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.256657 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.256670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.256724 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.256742 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.360388 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.360474 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.360499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.360537 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.360566 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.464072 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.464117 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.464128 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.464146 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.464157 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.569253 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.569347 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.569371 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.569408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.569442 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.674207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.674494 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.674560 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.674589 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.674637 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.778247 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.778412 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.778432 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.778505 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.778528 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.882385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.883523 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.883784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.884018 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.884240 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.987928 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.987993 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.988013 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.988042 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:18 crc kubenswrapper[4768]: I1203 16:19:18.988061 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:18Z","lastTransitionTime":"2025-12-03T16:19:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.091750 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.091832 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.091851 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.091880 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.091903 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.194855 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.194945 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.194967 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.194992 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.195010 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.298386 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.298475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.298494 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.298522 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.298543 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.401284 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.401377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.401402 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.401426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.401443 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.504537 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.504590 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.504647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.504682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.504706 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.531283 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.531345 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.531283 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:19 crc kubenswrapper[4768]: E1203 16:19:19.531470 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.531539 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:19 crc kubenswrapper[4768]: E1203 16:19:19.531675 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:19 crc kubenswrapper[4768]: E1203 16:19:19.531803 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:19 crc kubenswrapper[4768]: E1203 16:19:19.532017 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.608362 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.608445 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.608466 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.608497 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.608518 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.712191 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.712250 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.712264 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.712287 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.712301 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.815843 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.815919 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.815936 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.815965 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.815987 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.918894 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.918976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.918999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.919034 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:19 crc kubenswrapper[4768]: I1203 16:19:19.919055 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:19Z","lastTransitionTime":"2025-12-03T16:19:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.022562 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.022682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.022704 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.022733 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.022751 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.125548 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.125645 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.125655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.125674 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.125686 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.229666 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.229739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.229766 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.229805 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.229830 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.332896 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.332961 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.332976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.333001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.333016 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.436280 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.436366 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.436384 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.436416 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.436439 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.539946 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.540020 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.540038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.540072 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.540091 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.643312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.643375 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.643394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.643420 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.643439 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.747165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.747233 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.747252 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.747280 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.747302 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.851066 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.851142 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.851162 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.851192 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.851213 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.954682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.954765 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.954784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.954818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:20 crc kubenswrapper[4768]: I1203 16:19:20.954840 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:20Z","lastTransitionTime":"2025-12-03T16:19:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.058698 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.058786 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.058804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.058836 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.058857 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.162127 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.162204 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.162228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.162262 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.162286 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.266294 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.266366 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.266386 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.266416 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.266440 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.370392 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.370483 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.370503 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.370537 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.370564 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.474700 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.474768 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.474788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.474818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.474839 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.531006 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.531105 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.531207 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:21 crc kubenswrapper[4768]: E1203 16:19:21.531399 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.531745 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:21 crc kubenswrapper[4768]: E1203 16:19:21.531849 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:21 crc kubenswrapper[4768]: E1203 16:19:21.532097 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:21 crc kubenswrapper[4768]: E1203 16:19:21.532326 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.577964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.578034 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.578052 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.578087 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.578109 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.681949 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.682021 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.682041 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.682069 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.682091 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.785038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.785142 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.785161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.785191 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.785210 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.888529 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.888649 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.888670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.888702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.888720 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.991422 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.991490 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.991513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.991542 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:21 crc kubenswrapper[4768]: I1203 16:19:21.991565 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:21Z","lastTransitionTime":"2025-12-03T16:19:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.094941 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.094986 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.095001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.095024 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.095040 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.197797 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.197897 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.197917 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.197939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.197958 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.301073 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.301157 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.301177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.301207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.301231 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.405015 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.405091 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.405111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.405144 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.405164 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.508991 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.509052 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.509068 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.509094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.509108 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.612497 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.612570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.612614 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.612645 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.612668 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.715948 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.716002 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.716015 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.716035 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.716048 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.819662 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.819720 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.819739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.819760 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.819773 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.922566 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.922657 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.922678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.922724 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:22 crc kubenswrapper[4768]: I1203 16:19:22.922743 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:22Z","lastTransitionTime":"2025-12-03T16:19:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.031282 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.031383 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.031396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.031418 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.031430 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.135124 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.135192 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.135203 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.135223 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.135235 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.238895 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.238959 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.238977 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.239003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.239024 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.342667 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.342731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.342750 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.342778 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.342800 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.446115 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.446189 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.446206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.446232 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.446251 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.531880 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.531967 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.532051 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.531901 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:23 crc kubenswrapper[4768]: E1203 16:19:23.532110 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:23 crc kubenswrapper[4768]: E1203 16:19:23.532318 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:23 crc kubenswrapper[4768]: E1203 16:19:23.532543 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:23 crc kubenswrapper[4768]: E1203 16:19:23.532718 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.549583 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.549664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.549680 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.549703 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.549720 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.552945 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.577801 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.595309 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.617584 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.642215 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.653793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.653869 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.653888 4768 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.653917 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.653935 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.668001 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.688749 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.705890 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:
01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.723754 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-sche
duler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.746729 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.756405 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.756457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.756471 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.756502 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.756520 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.762409 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.777571 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.797716 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.819903 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.838650 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.854281 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.860003 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.860069 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.860090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.860117 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.860134 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.892331 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0e
da147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:23Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.963430 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.963481 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.963495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.963793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:23 crc kubenswrapper[4768]: I1203 16:19:23.963812 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:23Z","lastTransitionTime":"2025-12-03T16:19:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.067534 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.067646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.067678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.067714 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.067739 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.170874 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.170939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.170957 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.170984 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.171002 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.274312 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.274386 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.274403 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.274431 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.274452 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.378379 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.378482 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.378501 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.378531 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.378553 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.481951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.481999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.482008 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.482028 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.482040 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.585350 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.585415 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.585425 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.585444 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.585455 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.695877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.695956 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.695974 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.696002 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.696019 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.800338 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.800411 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.800430 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.800458 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.800478 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.904232 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.904291 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.904304 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.904326 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:24 crc kubenswrapper[4768]: I1203 16:19:24.904340 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:24Z","lastTransitionTime":"2025-12-03T16:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.007541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.007640 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.007663 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.007693 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.007712 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.111313 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.111381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.111400 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.111429 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.111448 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.215186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.215259 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.215271 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.215297 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.215311 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.318733 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.318784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.318797 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.318819 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.318834 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.422032 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.422090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.422102 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.422124 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.422137 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.525187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.525220 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.525234 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.525251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.525265 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.534170 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:25 crc kubenswrapper[4768]: E1203 16:19:25.534309 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.534489 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:25 crc kubenswrapper[4768]: E1203 16:19:25.534536 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.534656 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:25 crc kubenswrapper[4768]: E1203 16:19:25.534707 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.535051 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:25 crc kubenswrapper[4768]: E1203 16:19:25.535111 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.628700 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.628737 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.628746 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.628761 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.628771 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.732064 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.732114 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.732126 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.732148 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.732161 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.836188 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.836269 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.836304 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.836339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.836370 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.939883 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.939938 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.939953 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.939972 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:25 crc kubenswrapper[4768]: I1203 16:19:25.939988 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:25Z","lastTransitionTime":"2025-12-03T16:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.043586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.043653 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.043664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.043683 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.043693 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.146669 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.146753 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.146776 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.146810 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.146834 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.250868 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.250944 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.250961 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.250988 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.251008 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.309657 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.309720 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.309739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.309765 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.309782 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.333336 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.339423 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.339491 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.339509 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.339541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.339561 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.356417 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.361960 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.362010 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
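The recurring failure above is a TLS validity-window error: the webhook serving 127.0.0.1:9743 presents a certificate whose notAfter (2025-08-24T17:21:41Z) is before the node's current time. A minimal Go sketch of the same check follows; it is illustrative only (not part of the captured log), and it assumes it runs on the affected host where that address is reachable.

// certcheck.go - minimal sketch: complete a TLS handshake with the webhook
// endpoint from the log and print each peer certificate's validity window,
// mirroring the x509 check that fails in the error above.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify lets the handshake succeed even though verification
	// would fail, so the expired certificate can still be inspected.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%q notBefore=%s notAfter=%s expired=%v\n",
			cert.Subject.String(),
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339),
			now.After(cert.NotAfter))
	}
}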
event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.362027 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.362051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.362069 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.377941 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.383050 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.383099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.383114 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.383135 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.383150 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.400976 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.405939 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.405982 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
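After a fixed number of failed attempts the kubelet abandons the update, as the final "update node status exceeds retry count" record below shows. The Go sketch that follows is an illustration of that bounded-retry pattern, not kubelet's actual source; the retry count of 5 is an assumption matching the five failed attempts in this log.

// retryloop.go - illustrative sketch of the bounded retry behavior visible
// in the surrounding log records: retry the status update a fixed number of
// times, then give up with a terminal error.
package main

import (
	"errors"
	"fmt"
)

// Assumed from the five failed attempts logged here, not read from kubelet code.
const nodeStatusUpdateRetry = 5

// Stand-in for the PATCH request that the expired-certificate webhook rejects.
func tryUpdateNodeStatus() error {
	return errors.New("failed calling webhook \"node.network-node-identity.openshift.io\": x509: certificate has expired")
}

func main() {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return // success: stop retrying
	}
	fmt.Println("Unable to update node status: update node status exceeds retry count")
}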
event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.405993 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.406009 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.406019 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.430464 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:26Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:26 crc kubenswrapper[4768]: E1203 16:19:26.430813 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.434165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.434238 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.434254 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.434283 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.434301 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.538843 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.539314 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.539543 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.539809 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.540022 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.546364 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.643185 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.643783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.643998 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.644437 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:26 crc kubenswrapper[4768]: I1203 16:19:26.644707 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:26Z","lastTransitionTime":"2025-12-03T16:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.369729 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.369806 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.369824 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.369853 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.369877 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:27Z","lastTransitionTime":"2025-12-03T16:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.472555 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.472620 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.472629 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.472647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.472656 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:27Z","lastTransitionTime":"2025-12-03T16:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.531445 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.531449 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.531521 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.531570 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:27 crc kubenswrapper[4768]: E1203 16:19:27.531597 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:27 crc kubenswrapper[4768]: E1203 16:19:27.531792 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:27 crc kubenswrapper[4768]: E1203 16:19:27.531987 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:27 crc kubenswrapper[4768]: E1203 16:19:27.532114 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.575315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.575360 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.575374 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.575396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:27 crc kubenswrapper[4768]: I1203 16:19:27.575413 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:27Z","lastTransitionTime":"2025-12-03T16:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.531008 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.531099 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.531109 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.531200 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:29 crc kubenswrapper[4768]: E1203 16:19:29.531193 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:29 crc kubenswrapper[4768]: E1203 16:19:29.531335 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:29 crc kubenswrapper[4768]: E1203 16:19:29.531468 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:29 crc kubenswrapper[4768]: E1203 16:19:29.531548 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.538916 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.538951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.538965 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.538984 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.538999 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:29Z","lastTransitionTime":"2025-12-03T16:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.641658 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.641719 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.641731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.641769 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:29 crc kubenswrapper[4768]: I1203 16:19:29.641782 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:29Z","lastTransitionTime":"2025-12-03T16:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.532641 4768 scope.go:117] "RemoveContainer" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" Dec 03 16:19:30 crc kubenswrapper[4768]: E1203 16:19:30.532860 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.573626 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.573696 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.573715 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.573745 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.573764 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:30Z","lastTransitionTime":"2025-12-03T16:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.676973 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.677047 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.677102 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.677134 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:30 crc kubenswrapper[4768]: I1203 16:19:30.677153 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:30Z","lastTransitionTime":"2025-12-03T16:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.400706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.400767 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.400784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.400809 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.400827 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.503647 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.503677 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.503690 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.503703 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.503713 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.530720 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.530855 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.530720 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.530910 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:31 crc kubenswrapper[4768]: E1203 16:19:31.530969 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:31 crc kubenswrapper[4768]: E1203 16:19:31.531167 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:31 crc kubenswrapper[4768]: E1203 16:19:31.531284 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:31 crc kubenswrapper[4768]: E1203 16:19:31.531441 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.606355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.606411 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.606426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.606445 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.606458 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.709813 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.709882 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.709902 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.709932 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.709954 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.813272 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.813361 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.813381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.813410 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.813428 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.916367 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.916416 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.916434 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.916457 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:31 crc kubenswrapper[4768]: I1203 16:19:31.916476 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:31Z","lastTransitionTime":"2025-12-03T16:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.018572 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.018655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.018670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.018692 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.018706 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.121808 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.121885 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.121902 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.121932 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.121952 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.224793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.224867 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.224886 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.224917 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.224942 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.328261 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.328321 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.328331 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.328348 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.328358 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.430888 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.431040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.431067 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.431108 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.431130 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.533807 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.533884 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.533903 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.533933 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.533955 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.636990 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.637058 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.637076 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.637103 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.637123 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.739942 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.739997 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.740018 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.740043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.740064 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.843372 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.843484 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.843505 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.843562 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.843589 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.946881 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.946956 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.946973 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.947005 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:32 crc kubenswrapper[4768]: I1203 16:19:32.947023 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:32Z","lastTransitionTime":"2025-12-03T16:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.050465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.050547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.050570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.050642 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.050669 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.153021 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.153058 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.153070 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.153087 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.153099 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.256462 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.256517 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.256541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.256569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.256587 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.360062 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.360153 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.360174 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.360205 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.360239 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.462970 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.463014 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.463027 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.463043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.463056 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.531303 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.531326 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.531341 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:33 crc kubenswrapper[4768]: E1203 16:19:33.531449 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.531468 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:33 crc kubenswrapper[4768]: E1203 16:19:33.531658 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:33 crc kubenswrapper[4768]: E1203 16:19:33.531691 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:33 crc kubenswrapper[4768]: E1203 16:19:33.531754 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.547306 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.561884 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.566576 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.566651 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.566670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.566702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.566723 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.579779 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.595757 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.614891 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.629663 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.653387 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:68
7fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.668498 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.669930 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.670011 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.670025 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.670070 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.670088 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.684308 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd7
89a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.704172 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.727072 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0e
da147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.749530 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.768724 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.772544 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.772589 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.772627 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.772645 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.772656 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.781521 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.782814 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:33 crc kubenswrapper[4768]: E1203 16:19:33.782945 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:33 crc 
kubenswrapper[4768]: E1203 16:19:33.782998 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:05.78298208 +0000 UTC m=+102.702318503 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.794948 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.804661 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.817279 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.829802 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:33Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.875528 4768 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.875556 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.875579 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.875610 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.875627 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.978683 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.979137 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.979147 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.979175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:33 crc kubenswrapper[4768]: I1203 16:19:33.979188 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:33Z","lastTransitionTime":"2025-12-03T16:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.083490 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.083554 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.083572 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.083639 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.083660 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.187050 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.187125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.187144 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.187173 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.187191 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.290637 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.291084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.291175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.291296 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.291373 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.394822 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.395684 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.395788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.395901 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.396002 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.499151 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.499195 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.499207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.499228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.499244 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.602532 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.602582 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.602613 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.602634 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.602646 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.704741 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.704798 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.704812 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.704837 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.704850 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.808251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.808348 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.808377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.808408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.808427 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.911063 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.911122 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.911135 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.911157 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:34 crc kubenswrapper[4768]: I1203 16:19:34.911172 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:34Z","lastTransitionTime":"2025-12-03T16:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.013486 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.013535 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.013550 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.013569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.013583 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.033452 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/0.log" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.033506 4768 generic.go:334] "Generic (PLEG): container finished" podID="d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9" containerID="8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255" exitCode=1 Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.033577 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerDied","Data":"8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.034067 4768 scope.go:117] "RemoveContainer" containerID="8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.075975 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0e
da147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.092749 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.109061 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.117716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.117760 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.117772 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.117793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.117807 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.126321 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.142136 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.154877 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.177295 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.203757 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.221153 4768 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.221186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.221198 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.221218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.221232 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.223530 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.243060 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:
01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.263303 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.285525 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.300011 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.317825 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.323856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 
crc kubenswrapper[4768]: I1203 16:19:35.323933 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.323961 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.324002 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.324030 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.338572 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-co
py\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOn
ly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"m
ountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.361970 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.377235 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.398665 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:35Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.427574 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.427951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.428366 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.428635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.428866 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.531120 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:35 crc kubenswrapper[4768]: E1203 16:19:35.531536 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.531256 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:35 crc kubenswrapper[4768]: E1203 16:19:35.531773 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.531278 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:35 crc kubenswrapper[4768]: E1203 16:19:35.532038 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.531155 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:35 crc kubenswrapper[4768]: E1203 16:19:35.532308 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.533011 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.533083 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.533102 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.533125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.533143 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.636114 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.636574 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.636735 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.636838 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.636891 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.740315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.740796 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.740903 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.741021 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.741132 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.845109 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.845180 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.845194 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.845247 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.845306 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.948436 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.948507 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.948520 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.948541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:35 crc kubenswrapper[4768]: I1203 16:19:35.948555 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:35Z","lastTransitionTime":"2025-12-03T16:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.041338 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/0.log" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.041424 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerStarted","Data":"eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.051360 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.051418 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.051431 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.051455 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.051468 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.060866 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.078542 4768 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.092911 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.111739 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.128427 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.143556 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.155173 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.155243 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.155264 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.155291 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.155312 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.161237 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.182038 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.197301 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.219052 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.238128 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.254669 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.259281 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.259324 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc 
kubenswrapper[4768]: I1203 16:19:36.259338 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.259359 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.259379 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.279795 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.297209 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.308684 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}]
,\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.326859 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ov
nkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 
failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.345436 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.363929 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.364125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.364318 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.364483 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 
16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.364667 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.364265 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.468453 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.468513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.468530 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.468558 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.468577 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.571969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.572387 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.572502 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.572676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.572770 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.675755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.675825 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.675842 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.675871 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.675890 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.677595 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.677664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.677678 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.677694 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.677707 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.697156 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.703657 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.703739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.703759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.703783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.703804 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.723926 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.730100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.730319 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.730454 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.730628 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.730777 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.753580 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.759255 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.759319 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
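The retry above fails in the TLS handshake to the node-identity webhook, and the log itself prints the certificate's notAfter (2025-08-24T17:21:41Z). A minimal, illustrative check of that expiry from the node, not part of this log's tooling, assuming https://127.0.0.1:9743 is reachable and the third-party `cryptography` package is installed:

```python
# Sketch only: confirm the expired serving certificate behind the webhook
# failures logged above. Host and port come from the Post URL in the log.
import datetime
import ssl

from cryptography import x509  # assumption: package available on the node

# Fetch the certificate WITHOUT verification (verification is what fails).
pem = ssl.get_server_certificate(("127.0.0.1", 9743))
cert = x509.load_pem_x509_certificate(pem.encode())

now = datetime.datetime.utcnow()                 # naive UTC, matches not_valid_after
print("notAfter:", cert.not_valid_after)         # expect 2025-08-24 17:21:41
print("expired:", cert.not_valid_after < now)    # expect True, per the log
```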
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.759339 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.759376 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.759400 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.780377 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.786070 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.786145 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.786168 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.786201 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.786226 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.806904 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [...] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:36Z is after 2025-08-24T17:21:41Z"
Dec 03 16:19:36 crc kubenswrapper[4768]: E1203 16:19:36.807125 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.809796 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
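After the final failed patch the kubelet gives up for this sync: upstream kubelet attempts the node-status update a fixed number of times (the nodeStatusUpdateRetry constant, 5) before emitting the "exceeds retry count" error seen above. A small illustrative model of that loop, with hypothetical names rather than kubelet source, showing how a persistently failing webhook ends in exactly this terminal error string:

```python
# Sketch of the retry budget, not kubelet source. The constant mirrors
# kubelet's nodeStatusUpdateRetry; the error strings come from the log.
NODE_STATUS_UPDATE_RETRY = 5

def update_node_status(try_patch):
    """Return None on success, or the terminal error once retries run out."""
    for _ in range(NODE_STATUS_UPDATE_RETRY):
        if try_patch() is None:
            return None  # the patch landed; the status heartbeat succeeded
    return "update node status exceeds retry count"

# While the webhook certificate stays expired, every attempt errors out:
always_fail = lambda: "failed calling webhook"
print(update_node_status(always_fail))  # -> update node status exceeds retry count
```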
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.809865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.809884 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.809912 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:36 crc kubenswrapper[4768]: I1203 16:19:36.809930 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:36Z","lastTransitionTime":"2025-12-03T16:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[...]
Dec 03 16:19:37 crc kubenswrapper[4768]: I1203 16:19:37.530816 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6"
Dec 03 16:19:37 crc kubenswrapper[4768]: I1203 16:19:37.530939 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 03 16:19:37 crc kubenswrapper[4768]: I1203 16:19:37.530856 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 03 16:19:37 crc kubenswrapper[4768]: I1203 16:19:37.531052 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:19:37 crc kubenswrapper[4768]: E1203 16:19:37.531101 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1"
Dec 03 16:19:37 crc kubenswrapper[4768]: E1203 16:19:37.531279 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 03 16:19:37 crc kubenswrapper[4768]: E1203 16:19:37.531363 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 03 16:19:37 crc kubenswrapper[4768]: E1203 16:19:37.531435 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
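Every NotReady condition and "Error syncing pod" above bottoms out in the same root message: no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal, illustrative reproduction of the check that message reflects, assuming the conf dir the log names and the *.conf/*.conflist/*.json extensions that libcni-based runtimes load:

```python
# Sketch only: list CNI network configs the way the runtime's readiness
# check would see them. An empty listing corresponds to the
# NetworkPluginNotReady errors above; on a non-CRC host the directory
# may simply not exist, and the listing is then empty as well.
import pathlib

CNI_CONF_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")  # dir named in the log

confs = sorted(
    p.name for p in CNI_CONF_DIR.glob("*")
    if p.suffix in {".conf", ".conflist", ".json"}
)
print("CNI config files:", confs if confs else "none - node stays NotReady")
```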
[...]
Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.884778 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.884861 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.884882 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.884911 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.884936 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:38Z","lastTransitionTime":"2025-12-03T16:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.989053 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.989130 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.989152 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.989185 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:38 crc kubenswrapper[4768]: I1203 16:19:38.989207 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:38Z","lastTransitionTime":"2025-12-03T16:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.092706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.092783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.092802 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.092833 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.092853 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.196395 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.196499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.196529 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.196562 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.196582 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.300518 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.300576 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.300593 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.300635 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.300652 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.403717 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.403801 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.403818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.403849 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.403873 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.506465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.506512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.506523 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.506541 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.506552 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.531161 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.531213 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.531247 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.531337 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:39 crc kubenswrapper[4768]: E1203 16:19:39.531438 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:39 crc kubenswrapper[4768]: E1203 16:19:39.531789 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:39 crc kubenswrapper[4768]: E1203 16:19:39.531679 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:39 crc kubenswrapper[4768]: E1203 16:19:39.531938 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.609997 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.610084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.610100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.610177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.610201 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.713525 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.713655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.713688 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.713727 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.713768 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.817866 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.817929 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.817947 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.817976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.817995 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.921115 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.921188 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.921207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.921236 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:39 crc kubenswrapper[4768]: I1203 16:19:39.921255 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:39Z","lastTransitionTime":"2025-12-03T16:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.025340 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.025442 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.025470 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.025510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.025537 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.129195 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.129455 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.129473 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.129500 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.129520 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.233442 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.233517 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.233536 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.233567 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.233586 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.337654 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.337734 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.337752 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.337787 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.337807 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.441233 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.441303 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.441323 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.441356 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.441378 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.545280 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.545381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.545399 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.545430 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.545451 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.648273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.648354 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.648373 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.648408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.648435 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.751932 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.752006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.752022 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.752513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.752574 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.857025 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.857101 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.857130 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.857169 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.857199 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.960895 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.960964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.960983 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.961014 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:40 crc kubenswrapper[4768]: I1203 16:19:40.961038 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:40Z","lastTransitionTime":"2025-12-03T16:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.064547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.064654 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.064672 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.064703 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.064726 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.167933 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.168001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.168019 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.168047 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.168067 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.272361 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.272440 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.272465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.272497 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.272521 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.376502 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.376577 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.376629 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.376660 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.376680 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.480567 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.480662 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.480685 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.480924 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.480965 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.531656 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.531762 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.531792 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:41 crc kubenswrapper[4768]: E1203 16:19:41.531826 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.531792 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:41 crc kubenswrapper[4768]: E1203 16:19:41.532007 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:41 crc kubenswrapper[4768]: E1203 16:19:41.532199 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:41 crc kubenswrapper[4768]: E1203 16:19:41.532360 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.583306 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.583354 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.583364 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.583381 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.583397 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.686051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.686113 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.686134 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.686160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.686177 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.789830 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.789958 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.789977 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.790008 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.790028 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.894042 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.894206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.894218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.894240 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.894252 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.997578 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.997718 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.997743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.997773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:41 crc kubenswrapper[4768]: I1203 16:19:41.997793 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:41Z","lastTransitionTime":"2025-12-03T16:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.107067 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.107450 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.107465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.107492 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.107510 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.210893 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.210960 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.210978 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.211006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.211026 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.315006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.315066 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.315078 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.315105 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.315123 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.417964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.418024 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.418042 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.418064 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.418083 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.521512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.521558 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.521575 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.521632 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.521652 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.625187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.625233 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.625246 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.625267 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.625282 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.728716 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.728774 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.728792 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.728818 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.728838 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.832270 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.832383 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.832406 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.832441 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.832472 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.936111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.936211 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.936238 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.936274 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:42 crc kubenswrapper[4768]: I1203 16:19:42.936300 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:42Z","lastTransitionTime":"2025-12-03T16:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.039958 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.040037 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.040059 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.040092 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.040112 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.143954 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.144006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.144026 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.144055 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.144078 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.247180 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.247251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.247270 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.247298 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.247318 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.351025 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.351091 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.351106 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.351133 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.351151 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.454934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.455004 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.455023 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.455057 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.455079 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.531441 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.531715 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.531929 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:43 crc kubenswrapper[4768]: E1203 16:19:43.531936 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.532003 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:43 crc kubenswrapper[4768]: E1203 16:19:43.532120 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:43 crc kubenswrapper[4768]: E1203 16:19:43.532205 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:43 crc kubenswrapper[4768]: E1203 16:19:43.532371 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.550917 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.558423 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.558490 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.558508 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.558534 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc 
kubenswrapper[4768]: I1203 16:19:43.558555 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.587855 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":
[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for 
*v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.610738 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.637153 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.662548 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971
ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.662883 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.662964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.662991 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.663025 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.663051 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.680904 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.697934 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.718296 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.739442 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.756087 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.766006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.766051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.766071 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.766100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.766120 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.776002 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.798731 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.821067 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.845226 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.866200 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.869572 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.869670 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.869689 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.869719 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.869738 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.891667 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.914370 4768 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.935346 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:43Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.973290 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.973351 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.973366 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.973396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:43 crc kubenswrapper[4768]: I1203 16:19:43.973410 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:43Z","lastTransitionTime":"2025-12-03T16:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.075792 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.075859 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.075877 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.075906 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.075925 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.179064 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.179135 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.179150 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.179179 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.179197 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.282379 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.282450 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.282477 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.282510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.282536 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.386149 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.386215 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.386234 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.386258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.386276 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.490324 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.490396 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.490414 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.490453 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.490479 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.532364 4768 scope.go:117] "RemoveContainer" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.594125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.594192 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.594212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.594240 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.594262 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.698471 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.698975 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.698994 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.699025 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.699046 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.803109 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.803186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.803209 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.803239 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.803265 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.907268 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.907335 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.907359 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.907410 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:44 crc kubenswrapper[4768]: I1203 16:19:44.907432 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:44Z","lastTransitionTime":"2025-12-03T16:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.011644 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.011720 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.011741 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.011770 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.011790 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.080230 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/2.log" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.084032 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.084776 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.114832 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.114898 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.114912 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.114949 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.114965 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.117815 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.144079 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.164038 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.191130 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf388
32be70f6459ba9444edab50e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 
16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.203321 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217125 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217612 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217636 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217646 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.217671 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.231356 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.244352 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.257438 4768 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.272893 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.287969 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.302413 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.319389 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.320998 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.321042 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.321053 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.321071 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.321082 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.335236 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.350127 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.365058 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.381009 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.397845 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:45Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.424494 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.424856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.424922 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.425046 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.425128 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.527849 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.527932 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.527951 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.527983 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.528003 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.531557 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.531651 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.531566 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.531811 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.531922 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.532144 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.532653 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.532502 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.631426 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.631842 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.632030 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.632197 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.632332 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.638181 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.638406 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.638576 4768 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.638541 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:49.638485429 +0000 UTC m=+146.557821912 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.638719 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:49.638691735 +0000 UTC m=+146.558028198 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.638838 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.638929 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639110 4768 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639183 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639242 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639284 4768 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639251 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:49.639217878 +0000 UTC m=+146.558554341 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.639463 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:49.639402703 +0000 UTC m=+146.558739166 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.736997 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.737463 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.737590 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.737740 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.737861 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.739914 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.740201 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.740257 4768 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.740283 4768 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:45 crc kubenswrapper[4768]: E1203 16:19:45.740407 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:49.740375512 +0000 UTC m=+146.659711975 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.842087 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.842195 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.842218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.842251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.842273 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.946258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.946347 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.946366 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.946398 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:45 crc kubenswrapper[4768]: I1203 16:19:45.946418 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:45Z","lastTransitionTime":"2025-12-03T16:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.049805 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.049885 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.049904 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.049934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.049953 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.153505 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.153570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.153588 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.153649 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.153669 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.257676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.257736 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.257755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.257784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.257803 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.361404 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.361465 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.361487 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.361517 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.361537 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.465318 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.465389 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.465409 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.465438 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.465460 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.554951 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.568944 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.569048 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.569069 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.569096 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.569117 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.673128 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.673454 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.673547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.673653 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.673721 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.777682 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.778186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.778323 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.778495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.778662 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.882693 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.882750 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.882764 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.882788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.882802 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.916133 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.916206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.916228 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.916257 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.916278 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: E1203 16:19:46.937372 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.943084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.943169 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.943187 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.943215 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.943235 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: E1203 16:19:46.963628 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.969157 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.969197 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.969212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.969234 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.969250 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:46 crc kubenswrapper[4768]: E1203 16:19:46.987842 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:46Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.993626 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.993687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.993701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.993723 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:46 crc kubenswrapper[4768]: I1203 16:19:46.993741 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:46Z","lastTransitionTime":"2025-12-03T16:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.016174 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.021742 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.021797 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.021805 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.021825 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.021837 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.041877 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.042054 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.044331 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.044394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.044408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.044430 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.044447 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.100788 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/3.log" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.101609 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/2.log" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.105051 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" exitCode=1 Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.105131 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.105202 4768 scope.go:117] "RemoveContainer" containerID="4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.106215 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.106377 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.124137 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.144117 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.147147 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.147219 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.147230 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.147246 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.147257 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.168314 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dc94dfa-2582-484e-a35f-4c89f4ac02ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e54bdff646b76809e1fbaac3e0881ac21311b6347270117d14113a1f3ca3077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88808a65a8983f8cac59cfa2db8e097dc8c2c0395b3121b4dbbc89fbbff2cf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476fd31133a2c7f6ff99562b16dec4ece22f94cdd5d0c5c1e138ae4d65c862a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a57fe486321b00bf04c4d4e7f2c6ce0d3123a70fd386d5bae43d594bb76591\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af4a80cf1721650ceda0bde2feba84dfd03f274e019b9d0f1348f7ff92f55f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.191976 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.212962 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.234177 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.251092 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.251207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc 
kubenswrapper[4768]: I1203 16:19:47.251218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.251243 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.251259 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.253415 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.272265 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.286705 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.310819 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:141\\\\nI1203 16:19:45.677488 6785 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.677570 6785 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.678189 6785 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.679142 6785 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:19:45.679215 6785 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:19:45.679349 6785 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:19:45.679398 6785 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:19:45.679334 6785 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:19:45.679468 6785 handler.go:208] Removed *v1.Node 
event handler 2\\\\nI1203 16:19:45.679489 6785 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:19:45.679537 6785 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:19:45.679627 6785 handler.go:208] Removed *v1.Node event handler 7\\\\nI1203 16:19:45.679645 6785 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:19:45.679725 6785 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.326158 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.341823 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.353819 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.353858 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.353870 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.353893 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.353907 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.359453 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.375944 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.397113 4768 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.419591 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.439329 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457037 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457115 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457148 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457176 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.457612 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.481076 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:47Z is after 2025-08-24T17:21:41Z" Dec 03 
16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.530913 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.531005 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.530939 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.531127 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.531192 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.531341 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.531450 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:47 crc kubenswrapper[4768]: E1203 16:19:47.531528 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.559855 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.559916 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.559964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.559986 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.560001 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.663976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.664033 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.664046 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.664067 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.664078 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.767489 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.767556 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.767578 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.767639 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.767662 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.871270 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.871344 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.871363 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.871394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.871413 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.974052 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.974124 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.974146 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.974177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:47 crc kubenswrapper[4768]: I1203 16:19:47.974196 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:47Z","lastTransitionTime":"2025-12-03T16:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.077872 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.077953 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.077971 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.078000 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.078021 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.112656 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/3.log" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.181684 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.181759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.181785 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.181817 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.181841 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.284441 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.284527 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.284550 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.284581 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.284629 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.388427 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.388498 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.388513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.388535 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.388553 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.492061 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.492128 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.492139 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.492164 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.492178 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.595852 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.595943 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.595967 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.595999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.596021 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.700276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.700365 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.700383 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.700414 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.700432 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.804815 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.805251 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.805469 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.805692 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.805858 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.909493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.909574 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.909624 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.909655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:48 crc kubenswrapper[4768]: I1203 16:19:48.909675 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:48Z","lastTransitionTime":"2025-12-03T16:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.012920 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.012961 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.012969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.012986 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.012997 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.115709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.115784 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.115806 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.115840 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.115862 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.219038 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.219111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.219127 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.219174 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.219192 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.322375 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.322441 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.322458 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.322483 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.322502 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.425822 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.425888 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.425906 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.425935 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.425955 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.529182 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.529265 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.529284 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.529311 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.529330 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.531592 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.531727 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.531790 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:49 crc kubenswrapper[4768]: E1203 16:19:49.531903 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.531931 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:49 crc kubenswrapper[4768]: E1203 16:19:49.532135 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:49 crc kubenswrapper[4768]: E1203 16:19:49.532398 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:49 crc kubenswrapper[4768]: E1203 16:19:49.532655 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.632654 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.632728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.632746 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.632773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.632793 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.736964 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.737043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.737060 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.737090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.737110 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.840829 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.840901 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.840917 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.840943 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.840964 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.944469 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.944522 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.944532 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.944552 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:49 crc kubenswrapper[4768]: I1203 16:19:49.944571 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:49Z","lastTransitionTime":"2025-12-03T16:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.048144 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.048211 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.048229 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.048258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.048279 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.151472 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.151540 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.151553 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.151577 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.151620 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.255010 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.255085 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.255104 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.255134 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.255158 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.358825 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.359383 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.359416 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.359455 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.359478 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.462732 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.462785 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.462795 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.462813 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.462825 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.565824 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.565903 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.565922 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.565945 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.565965 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.669205 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.669277 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.669297 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.669328 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.669346 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.772094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.772179 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.772197 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.772229 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.772250 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.876389 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.876464 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.876481 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.876513 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.876540 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.980403 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.980483 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.980500 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.980530 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:50 crc kubenswrapper[4768]: I1203 16:19:50.980548 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:50Z","lastTransitionTime":"2025-12-03T16:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.084460 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.084538 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.084556 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.084585 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.084644 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.190934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.191036 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.191086 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.191123 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.191148 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.296194 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.296271 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.296292 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.296322 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.296342 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.400683 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.400763 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.400780 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.400808 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.400826 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.504406 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.504475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.504493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.504522 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.504542 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.530703 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.530785 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:51 crc kubenswrapper[4768]: E1203 16:19:51.530876 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.530935 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.530973 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:51 crc kubenswrapper[4768]: E1203 16:19:51.531238 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:51 crc kubenswrapper[4768]: E1203 16:19:51.531340 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:51 crc kubenswrapper[4768]: E1203 16:19:51.531535 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.607958 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.608027 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.608047 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.608071 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.608093 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.712100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.712174 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.712200 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.712230 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.712249 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.815231 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.815335 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.815357 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.815391 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.815414 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.919461 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.919526 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.919544 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.919570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:51 crc kubenswrapper[4768]: I1203 16:19:51.919590 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:51Z","lastTransitionTime":"2025-12-03T16:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.023000 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.023068 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.023086 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.023118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.023139 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.126914 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.126975 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.126996 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.127023 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.127041 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.229890 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.229946 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.229965 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.229991 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.230010 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.335164 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.335208 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.335219 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.335236 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.335248 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.439171 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.439265 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.439294 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.439335 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.439363 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.542590 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.542697 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.542715 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.542743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.542763 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.646702 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.646779 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.646799 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.646832 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.646853 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.750315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.750416 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.750436 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.750472 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.750493 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.853557 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.853667 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.853692 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.853764 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.853787 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.957356 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.957437 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.957461 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.957498 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:52 crc kubenswrapper[4768]: I1203 16:19:52.957536 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:52Z","lastTransitionTime":"2025-12-03T16:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.061099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.061181 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.061199 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.061231 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.061252 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.164001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.164061 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.164073 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.164098 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.164111 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.266999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.267072 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.267090 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.267165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.267191 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.371315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.371394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.371414 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.371445 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.371467 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.476118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.476202 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.476224 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.476255 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.476276 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.530981 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:53 crc kubenswrapper[4768]: E1203 16:19:53.531157 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.531266 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.531458 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:53 crc kubenswrapper[4768]: E1203 16:19:53.531474 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.531502 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:53 crc kubenswrapper[4768]: E1203 16:19:53.531679 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:53 crc kubenswrapper[4768]: E1203 16:19:53.531770 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.549776 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"85500369-1cce-4c55-b0e7-6297001e4507\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e8f0dba1d362addfdfbc3c7c84e05417daf7f6ca81f01c8d37c1146ec39e4ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e0927a6652cfbcf40ab433a2e00237f9d34de1c650f10b979fa1159d46246a7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.567219 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.583008 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.583087 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.583110 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.583146 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.583172 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.587883 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fea3ff9c-dadb-4168-90c0-24bc05a888e9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://42e7cd01933cc8d3b33a7fda86b9c52975e51a8bed0a1dea0a762875297cc4dc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg8jp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g94rv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.607970 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-j25k6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"772886ae-dcfc-418e-ac82-49d7844c99f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tjblt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-j25k6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.629669 4768 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.653433 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb1f1439256a399149b4968f43550bd0a65555af7a0f6092da2f0fc8f89d983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc9210925c7fb19508be86945f70e4f29fa85dca177b87a504440da3813ef2d2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.670028 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:41Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.682399 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-64w5t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9b155707-262e-482b-92c7-e097de0ff1d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://749b4280d43e2c310d400ed6978e8caf96863e015853a50fed6c7ee1fd95ff76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8l8ps\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-64w5t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.687583 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.687691 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.687713 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.687743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.687767 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.704152 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cea00839-26de-4cb0-9639-ac6217a11fd3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9da7e7d3d45795db094214afbe8feb23815c3049f88c20aa5ff810c307599bc2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d777a51c366e547502bdb98976cd8404b4f202327d12db7006e5127ac9d5688a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-r9fs4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:19:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-pgdpf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.726545 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8d4dq" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:34Z\\\",\\\"message\\\":\\\"2025-12-03T16:18:48+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a\\\\n2025-12-03T16:18:48+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_8877449f-b8ac-49c1-bbfe-e9512a1e556a to /host/opt/cni/bin/\\\\n2025-12-03T16:18:48Z [verbose] multus-daemon started\\\\n2025-12-03T16:18:48Z [verbose] Readiness Indicator file check\\\\n2025-12-03T16:19:33Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:19:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-shk7l\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8d4dq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.751645 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58b3fcfb-2165-49c7-b989-b79970d932fd\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1a91be61855e6fe46879c33275d6d298bce59bf0b1c594ad5e5abc6be850c41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://139a88d62930bb5ee20ee562e6754489de7669b0ec1cf789a1f9f81db66200f0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7b8bc399f001a94eb1e6e5ff626497e5666504d9dc330921d82c4cb6d0a620ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://026c1176078ccb2101b3da65ee2093de1ec539b539ba2e651a39d2ebf09dac51\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.777547 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dc94dfa-2582-484e-a35f-4c89f4ac02ae\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e54bdff646b76809e1fbaac3e0881ac21311b6347270117d14113a1f3ca3077\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88808a65a8983f8cac59cfa2db8e097dc8c2c0395b3121b4dbbc89fbbff2cf3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://476fd31133a2c7f6ff99562b16dec4ece22f94cdd5d0c5c1e138ae4d65c862a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f8a57fe486321b00bf04c4d4e7f2c6ce0d3123a70fd386d5bae43d594bb76591\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://af4a80cf1721650ceda0bde2feba84dfd03f274e019b9d0f1348f7ff92f55f0f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6898ccc169a048ff2933a9f7d690840c204ab235b149d5787c32a732a7de5f48\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d7dd2a1ed57d9c41204f95193d64f87f89cab3452397d1a3fe653809153d589\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://33c19eac967d642ed4e5dc6ed0e18c5c7f7d351a3fdd8e550924c2b413382e41\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.792525 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.792577 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.792588 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.792630 4768 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.792642 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.795203 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61d5f829ff2c094b24b968492abc8659b31f9d8fab55d6a817a8f9ba539739e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.809941 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9ce3aded6c2533dc463b12f42bb1cb184f2567867926d259aeb5211e84b2a7e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.827495 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2htqq" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"455c9e5e-20c7-4bb5-8ba2-de2f122d7038\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa0c543ee6f4aea7b5d8f3fdb0d182feccde5f6f63fbec5106bea1d672fc13a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13f2395139205f953055f070f47b0963076a6c0776d6f8eaad48e1c015d69e98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://90357da336aeae4a5c027de98bb436bdc7f6e83d7bd51c018abaffd49088f4fb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://07d24f6b6fde139e84928addc100fca4966d9b6b87df42af38140bfe9d67398d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://20f3a12ffd67640f7581285a23d0bdb4897290e14ac32662994aa957e3c785d5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://86aba65336b86f19c59546e815dddb63ad8d8574c3788d80cce66eb002218d1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://13238e4159e7b8ea51a789c37cff539e64989c2f93900e502b004518f5de7981\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6xg2b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2htqq\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.843736 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"edc4814f-6b71-488a-8a4b-f76360b9d1b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.859048 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b1adf05-6acb-4b5f-a224-bd33772107e8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://975fb98fa91107768f3064e78ab8309b78fcee54d96092cb3dcf05413d22706a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f545c19200a337d1c94f8c0f71c4bb2bd7110f2836fcaf2671687f23285c08a4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6568ed974fdbfe72849a1fabe0a99ee2fce9a6eca799255b2e1b7f827d98221\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:23Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.876918 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-vgj7g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8b9d71-3aeb-428c-b61a-e984cce08f37\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e0559e9c323e9169ef04562976c05a392c07ce7f5280eb15e39b6cc5fc5cbd1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-76tfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:46Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-vgj7g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.897569 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.897666 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.897688 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.897718 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.897738 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:53Z","lastTransitionTime":"2025-12-03T16:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:53 crc kubenswrapper[4768]: I1203 16:19:53.905714 4768 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"29dd67f1-08a6-43ed-840d-cf4b166d5664\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-03T16:18:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf388
32be70f6459ba9444edab50e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4569bc1b45436df4fd86289171704b00fe8f9d0eda147d713439ae420951e8a1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:13Z\\\",\\\"message\\\":\\\"atus:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI1203 16:19:13.553629 6388 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553883 6388 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553867 6388 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-j25k6 in node crc\\\\nI1203 16:19:13.553897 6388 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI1203 16:19:13.553908 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI1203 16:19:13.553916 6388 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI1203 16:19:13.553772 6388 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf in node crc\\\\nI1203 16:19:13.553935 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf after 0 failed attempt(s)\\\\nI1203 16:19:13.553591 6388 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-vgj7g after 0 failed attempt(s)\\\\nI1203 16:19:13.5\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-03T16:19:46Z\\\",\\\"message\\\":\\\"rs/externalversions/factory.go:141\\\\nI1203 16:19:45.677488 6785 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.677570 6785 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.678189 6785 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1203 16:19:45.679142 6785 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1203 16:19:45.679215 6785 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1203 16:19:45.679349 6785 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1203 16:19:45.679398 6785 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1203 16:19:45.679334 6785 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1203 16:19:45.679468 6785 handler.go:208] Removed *v1.Node event handler 2\\\\nI1203 16:19:45.679489 6785 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1203 16:19:45.679537 6785 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1203 16:19:45.679627 6785 handler.go:208] Removed *v1.Node 
event handler 7\\\\nI1203 16:19:45.679645 6785 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1203 16:19:45.679725 6785 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-03T16:19:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-03T16:18:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e63959
755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-03T16:18:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-03T16:18:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrwrt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-03T16:18:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5z68m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:53Z is after 2025-08-24T17:21:41Z" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.000697 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.000767 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.000780 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.000804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.000817 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.103969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.104023 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.104035 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.104053 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.104063 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.207772 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.207844 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.207862 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.207885 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.207901 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.310892 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.310953 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.310971 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.310993 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.311009 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.414276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.414340 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.414362 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.414394 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.414416 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.517275 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.517341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.517357 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.517379 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.517394 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.621094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.621156 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.621176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.621207 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.621225 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.724542 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.724631 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.724653 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.724680 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.724699 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.828480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.828551 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.828573 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.828642 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.828662 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.932031 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.932109 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.932129 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.932161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:54 crc kubenswrapper[4768]: I1203 16:19:54.932187 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:54Z","lastTransitionTime":"2025-12-03T16:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.034967 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.035019 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.035032 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.035052 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.035066 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.138608 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.138664 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.138674 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.138695 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.138708 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.242075 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.242186 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.242214 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.242255 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.242277 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.345715 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.345783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.345801 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.345830 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.345849 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.449439 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.449510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.449528 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.449555 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.449575 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.531404 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.531559 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:55 crc kubenswrapper[4768]: E1203 16:19:55.531655 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.531566 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.531739 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:55 crc kubenswrapper[4768]: E1203 16:19:55.531995 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:55 crc kubenswrapper[4768]: E1203 16:19:55.532064 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:55 crc kubenswrapper[4768]: E1203 16:19:55.532149 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.553407 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.553480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.553499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.553526 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.553546 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.657157 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.657218 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.657232 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.657254 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.657268 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.760566 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.760672 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.760687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.760712 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.760731 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.863660 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.863749 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.863767 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.863794 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.863815 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.967545 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.967661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.967687 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.967724 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:55 crc kubenswrapper[4768]: I1203 16:19:55.967751 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:55Z","lastTransitionTime":"2025-12-03T16:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.071620 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.071690 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.071706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.071728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.071741 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.175148 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.175225 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.175241 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.175266 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.175280 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.278175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.278252 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.278271 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.278300 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.278324 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.381475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.381526 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.381538 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.381556 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.381569 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.484827 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.484889 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.484905 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.484934 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.484953 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.587301 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.587380 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.587391 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.587408 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.587520 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.691114 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.691212 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.691231 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.691292 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.691314 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.798278 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.798341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.798357 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.798384 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.798402 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.901642 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.901708 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.901727 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.901754 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:56 crc kubenswrapper[4768]: I1203 16:19:56.901777 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:56Z","lastTransitionTime":"2025-12-03T16:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.004224 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.004275 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.004286 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.004304 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.004318 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.106730 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.106796 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.106813 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.106837 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.106860 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.210040 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.210086 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.210099 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.210118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.210130 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.313273 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.313337 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.313355 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.313382 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.313401 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.417095 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.417149 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.417159 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.417178 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.417191 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.427889 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.427966 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.427991 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.428028 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.428052 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.450802 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:57Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.455803 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.455856 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.455865 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.455883 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.455895 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.474999 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:57Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.480759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.480831 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.480859 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.480892 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.480919 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.497752 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:57Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.502175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.502253 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.502276 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.502305 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.502495 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.523319 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:57Z is after 
2025-08-24T17:21:41Z" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.528237 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.528315 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.528334 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.528365 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.528385 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.531319 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.531414 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.531488 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.533260 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.533679 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.533913 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.533961 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.534039 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.544150 4768 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-03T16:19:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f04c821a-4bbc-4c51-b87d-ffb4482e494c\\\",\\\"systemUUID\\\":\\\"507ff355-2b46-4e3a-9065-268c99e59f9e\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-03T16:19:57Z is after 
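
Independently of the webhook failure, the node stays NotReady because the kubelet's network plugin readiness check finds no CNI network configuration in /etc/kubernetes/cni/net.d/ (the network provider pods that would write it have not started), which is also why the four pod syncs above are skipped. A rough Go sketch of that readiness condition, under the assumption that the check amounts to finding at least one loadable network config file in the directory, as the standard libcni loader does for .conf, .conflist and .json files:

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // hasCNIConfig approximates, in spirit, the check behind
    // "no CNI configuration file in /etc/kubernetes/cni/net.d/":
    // the directory must hold at least one network config file.
    // The accepted extensions are an assumption based on libcni.
    func hasCNIConfig(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
    	fmt.Printf("network config present: %v (err=%v)\n", ok, err)
    }

Once the network provider writes its config into that directory, NetworkReady flips to true and the skipped pods can be synced.
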
2025-08-24T17:21:41Z" Dec 03 16:19:57 crc kubenswrapper[4768]: E1203 16:19:57.544277 4768 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.546333 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.546400 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.546421 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.546447 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.546465 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.650194 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.650294 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.650314 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.650379 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:57 crc kubenswrapper[4768]: I1203 16:19:57.650412 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:57Z","lastTransitionTime":"2025-12-03T16:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[The same five-record group (four "Recording event message for node" events: NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, followed by the setters.go:603 "Node became not ready" condition) repeats unchanged except for timestamps at roughly 100 ms intervals from Dec 03 16:19:57.754 through Dec 03 16:19:59.515.]
Has your network provider started?"} Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.530927 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.531002 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:19:59 crc kubenswrapper[4768]: E1203 16:19:59.531076 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:19:59 crc kubenswrapper[4768]: E1203 16:19:59.531212 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.530926 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.531282 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:19:59 crc kubenswrapper[4768]: E1203 16:19:59.531365 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:19:59 crc kubenswrapper[4768]: E1203 16:19:59.531448 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.532288 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:19:59 crc kubenswrapper[4768]: E1203 16:19:59.532502 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.592784 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.592748792 podStartE2EDuration="1m18.592748792s" podCreationTimestamp="2025-12-03 16:18:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.561746561 +0000 UTC m=+96.481083024" watchObservedRunningTime="2025-12-03 16:19:59.592748792 +0000 UTC m=+96.512085245" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.618693 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=76.618664462 podStartE2EDuration="1m16.618664462s" podCreationTimestamp="2025-12-03 16:18:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.593094341 +0000 UTC m=+96.512430804" watchObservedRunningTime="2025-12-03 16:19:59.618664462 +0000 UTC m=+96.538000955" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.621050 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.621161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.621181 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.621202 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.621219 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:59Z","lastTransitionTime":"2025-12-03T16:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.650340 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-vgj7g" podStartSLOduration=73.65031451 podStartE2EDuration="1m13.65031451s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.619664288 +0000 UTC m=+96.539000811" watchObservedRunningTime="2025-12-03 16:19:59.65031451 +0000 UTC m=+96.569650933" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.683163 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=33.683137248 podStartE2EDuration="33.683137248s" podCreationTimestamp="2025-12-03 16:19:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.665200295 +0000 UTC m=+96.584536728" watchObservedRunningTime="2025-12-03 16:19:59.683137248 +0000 UTC m=+96.602473671" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.698775 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podStartSLOduration=73.698765682 podStartE2EDuration="1m13.698765682s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.698245648 +0000 UTC m=+96.617582071" watchObservedRunningTime="2025-12-03 16:19:59.698765682 +0000 UTC m=+96.618102095" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.723696 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.723780 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.723793 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.723815 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.723832 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:59Z","lastTransitionTime":"2025-12-03T16:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.800437 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-pgdpf" podStartSLOduration=72.800402618 podStartE2EDuration="1m12.800402618s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.799623388 +0000 UTC m=+96.718959831" watchObservedRunningTime="2025-12-03 16:19:59.800402618 +0000 UTC m=+96.719739081" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.800762 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-64w5t" podStartSLOduration=73.800753898 podStartE2EDuration="1m13.800753898s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.7815148 +0000 UTC m=+96.700851233" watchObservedRunningTime="2025-12-03 16:19:59.800753898 +0000 UTC m=+96.720090351" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.826839 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.826915 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.826927 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.826950 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.826962 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:59Z","lastTransitionTime":"2025-12-03T16:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.856946 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=46.856918099 podStartE2EDuration="46.856918099s" podCreationTimestamp="2025-12-03 16:19:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.81670327 +0000 UTC m=+96.736039723" watchObservedRunningTime="2025-12-03 16:19:59.856918099 +0000 UTC m=+96.776254512" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.857083 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=13.857080003 podStartE2EDuration="13.857080003s" podCreationTimestamp="2025-12-03 16:19:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.855764139 +0000 UTC m=+96.775100562" watchObservedRunningTime="2025-12-03 16:19:59.857080003 +0000 UTC m=+96.776416436" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.913917 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2htqq" podStartSLOduration=73.913871961 podStartE2EDuration="1m13.913871961s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.912422003 +0000 UTC m=+96.831758426" watchObservedRunningTime="2025-12-03 16:19:59.913871961 +0000 UTC m=+96.833208434" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.930287 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.930360 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.930378 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.930406 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:19:59 crc kubenswrapper[4768]: I1203 16:19:59.930425 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:19:59Z","lastTransitionTime":"2025-12-03T16:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.034033 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.034094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.034104 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.034125 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.034140 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.139905 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.139969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.139983 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.140005 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.140449 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.244323 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.244450 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.244479 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.244512 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.244534 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.348578 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.348690 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.348714 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.348745 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.348771 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.451563 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.451677 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.451701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.451737 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.451759 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.555402 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.555461 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.555470 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.555492 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.555504 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.658905 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.658972 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.658989 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.659016 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.659035 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.763458 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.763523 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.763540 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.763570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.763631 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.866900 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.866941 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.866952 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.866969 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.866981 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.969580 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.969680 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.969701 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.969731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:00 crc kubenswrapper[4768]: I1203 16:20:00.969752 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:00Z","lastTransitionTime":"2025-12-03T16:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.072084 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.072175 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.072198 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.072232 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.072257 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.175307 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.175363 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.175377 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.175409 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.175426 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.278631 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.278914 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.279009 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.279095 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.279183 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.382363 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.382442 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.382466 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.382504 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.382533 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.486362 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.486430 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.486455 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.486586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.486658 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.531241 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.531353 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:01 crc kubenswrapper[4768]: E1203 16:20:01.531464 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.531636 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.531258 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:01 crc kubenswrapper[4768]: E1203 16:20:01.531807 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:01 crc kubenswrapper[4768]: E1203 16:20:01.531974 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:01 crc kubenswrapper[4768]: E1203 16:20:01.532113 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.590215 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.590295 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.590322 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.590375 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.590402 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.694119 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.694199 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.694221 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.694255 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.694279 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.797475 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.797570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.797661 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.797708 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.797762 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.900971 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.901043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.901072 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.901100 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:01 crc kubenswrapper[4768]: I1203 16:20:01.901123 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:01Z","lastTransitionTime":"2025-12-03T16:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.005092 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.005161 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.005180 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.005206 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.005227 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.108284 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.108346 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.108363 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.108393 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.108417 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.210755 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.210804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.210815 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.210834 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.210846 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.314937 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.314996 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.315013 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.315041 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.315056 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.417908 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.417966 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.417983 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.418002 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.418016 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.521874 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.522926 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.523134 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.523341 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.523637 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.627034 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.627079 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.627092 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.627110 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.627125 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.730666 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.730739 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.730749 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.730769 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.730782 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.833872 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.833956 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.833973 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.834001 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.834018 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.937957 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.938018 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.938033 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.938055 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:02 crc kubenswrapper[4768]: I1203 16:20:02.938075 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:02Z","lastTransitionTime":"2025-12-03T16:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.041226 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.041271 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.041283 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.041301 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.041314 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.150303 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.150385 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.150402 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.150433 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.150457 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.254111 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.254167 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.254177 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.254196 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.254210 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.357954 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.358021 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.358043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.358072 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.358093 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.461493 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.461550 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.461561 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.461583 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.461615 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.531448 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.531562 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.532873 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.533965 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:03 crc kubenswrapper[4768]: E1203 16:20:03.534156 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:03 crc kubenswrapper[4768]: E1203 16:20:03.533944 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:03 crc kubenswrapper[4768]: E1203 16:20:03.534482 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:03 crc kubenswrapper[4768]: E1203 16:20:03.534591 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.564118 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.564165 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.564183 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.564205 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.564224 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.667510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.667679 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.667709 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.667748 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.667777 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.771258 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.771321 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.771344 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.771371 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.771393 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.873987 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.874051 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.874068 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.874096 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.874115 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.977480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.977557 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.977575 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.977601 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:03 crc kubenswrapper[4768]: I1203 16:20:03.977695 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:03Z","lastTransitionTime":"2025-12-03T16:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.080766 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.080868 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.080883 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.080912 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.080931 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.191121 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.191547 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.191688 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.191763 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.191823 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.296167 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.296225 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.296243 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.296271 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.296291 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.399731 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.400093 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.400160 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.400248 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.400325 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.503398 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.503447 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.503463 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.503508 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.503522 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.606660 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.606728 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.606747 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.606773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.606798 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.710293 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.710372 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.710392 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.710422 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.710443 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.813780 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.814217 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.814352 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.814491 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.814627 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.917907 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.917976 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.917999 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.918034 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:04 crc kubenswrapper[4768]: I1203 16:20:04.918051 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:04Z","lastTransitionTime":"2025-12-03T16:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.021937 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.022017 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.022036 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.022070 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.022100 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.125414 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.125480 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.125492 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.125509 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.125519 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.228651 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.228801 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.228826 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.228850 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.228869 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.332765 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.332859 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.332881 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.332915 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.332937 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.436589 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.436698 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.436717 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.436743 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.436761 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.530845 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.530975 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.531018 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.531106 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.531264 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.531422 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.531560 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.531791 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.539093 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.539145 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.539163 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.539193 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.539215 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.642488 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.642650 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.642676 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.642759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.642871 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.745783 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.745837 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.745852 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.745875 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.745891 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.795086 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.795285 4768 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:20:05 crc kubenswrapper[4768]: E1203 16:20:05.795407 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs podName:772886ae-dcfc-418e-ac82-49d7844c99f1 nodeName:}" failed. No retries permitted until 2025-12-03 16:21:09.795380809 +0000 UTC m=+166.714717262 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs") pod "network-metrics-daemon-j25k6" (UID: "772886ae-dcfc-418e-ac82-49d7844c99f1") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.853684 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.853814 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.853892 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.853986 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.854078 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.957773 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.957843 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.957860 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.957888 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:05 crc kubenswrapper[4768]: I1203 16:20:05.957908 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:05Z","lastTransitionTime":"2025-12-03T16:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
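The MountVolume failure above is a different symptom from the CNI noise: "object \"openshift-multus\"/\"metrics-daemon-secret\" not registered" comes from the kubelet's watch-based secret manager, which only resolves secrets for pods it has registered, so while the pod cannot start the mount keeps failing and the operation backs off (here to 1m4s, consistent with a doubling retry interval). A sketch, under the same kubeconfig assumption as above, that asks the API server directly whether the secret exists, to separate a genuinely missing object from the kubelet simply not having caught up yet.

// secretcheck.go: distinguish a missing Secret from the kubelet-side
// "not registered" condition in the MountVolume error above. Sketch only;
// namespace and name are taken from the log.
package main

import (
	"context"
	"fmt"
	"os"

	apierrors "k8s.io/apimachinery/pkg/api/errors"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	_, err = client.CoreV1().Secrets("openshift-multus").
		Get(context.Background(), "metrics-daemon-secret", metav1.GetOptions{})
	switch {
	case err == nil:
		fmt.Println("secret exists; \"not registered\" was the kubelet's view, not the API server's")
	case apierrors.IsNotFound(err):
		fmt.Println("secret is genuinely missing from openshift-multus")
	default:
		panic(err)
	}
}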
Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.062521 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.062650 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.062679 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.062711 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.062732 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.166436 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.166510 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.166524 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.166546 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.166562 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.270006 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.270076 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.270091 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.270116 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.270135 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.373581 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.373655 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.373667 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.373706 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.373720 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.476438 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.476495 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.476505 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.476525 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.476538 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.580342 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.580411 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.580431 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.580456 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.580473 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.684514 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.684637 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.684667 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.684699 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.684724 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.787804 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.787874 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.787895 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.787925 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.787944 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.891110 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.891176 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.891188 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.891208 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.891223 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.995022 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.995075 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.995094 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.995123 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:06 crc kubenswrapper[4768]: I1203 16:20:06.995146 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:06Z","lastTransitionTime":"2025-12-03T16:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.098499 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.098551 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.098567 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.098632 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.098659 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.201507 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.201998 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.202147 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.202298 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.202459 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.306666 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.306738 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.306759 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.306788 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.306808 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.409365 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.409429 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.409451 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.409481 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.409502 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.511988 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.512020 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.512027 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.512043 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.512052 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.530582 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.530647 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:07 crc kubenswrapper[4768]: E1203 16:20:07.530720 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.530738 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.530583 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:07 crc kubenswrapper[4768]: E1203 16:20:07.531006 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:07 crc kubenswrapper[4768]: E1203 16:20:07.531013 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:07 crc kubenswrapper[4768]: E1203 16:20:07.531100 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.615456 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.615527 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.615546 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.615570 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.615591 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.718772 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.718828 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.718844 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.718867 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.718883 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.810930 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.810985 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.810997 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.811018 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.811031 4768 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-03T16:20:07Z","lastTransitionTime":"2025-12-03T16:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.881440 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8d4dq" podStartSLOduration=81.881417326 podStartE2EDuration="1m21.881417326s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:19:59.93241716 +0000 UTC m=+96.851753623" watchObservedRunningTime="2025-12-03 16:20:07.881417326 +0000 UTC m=+104.800753759" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.881714 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79"] Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.885044 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.892343 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.892369 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.892931 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 03 16:20:07 crc kubenswrapper[4768]: I1203 16:20:07.893396 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.021408 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.021932 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9987ab89-9341-4693-a4c6-a7bd7082e7ef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.022095 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9987ab89-9341-4693-a4c6-a7bd7082e7ef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.022261 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.022441 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9987ab89-9341-4693-a4c6-a7bd7082e7ef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123438 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9987ab89-9341-4693-a4c6-a7bd7082e7ef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc 
kubenswrapper[4768]: I1203 16:20:08.123569 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123659 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9987ab89-9341-4693-a4c6-a7bd7082e7ef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123695 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9987ab89-9341-4693-a4c6-a7bd7082e7ef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123733 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123805 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.123842 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/9987ab89-9341-4693-a4c6-a7bd7082e7ef-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.125231 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9987ab89-9341-4693-a4c6-a7bd7082e7ef-service-ca\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.134585 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9987ab89-9341-4693-a4c6-a7bd7082e7ef-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.145148 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/9987ab89-9341-4693-a4c6-a7bd7082e7ef-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-lff79\" (UID: \"9987ab89-9341-4693-a4c6-a7bd7082e7ef\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:08 crc kubenswrapper[4768]: I1203 16:20:08.202910 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.202369 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" event={"ID":"9987ab89-9341-4693-a4c6-a7bd7082e7ef","Type":"ContainerStarted","Data":"858351606a590ee53b22fb651b4508558561ceac87d3cc1d574eb2f547dd0236"} Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.202945 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" event={"ID":"9987ab89-9341-4693-a4c6-a7bd7082e7ef","Type":"ContainerStarted","Data":"70f0dc300263995a31b846fea8e79adfc014a4093cdebe487180ee0eba88fffb"} Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.227677 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-lff79" podStartSLOduration=83.227648984 podStartE2EDuration="1m23.227648984s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:09.227134711 +0000 UTC m=+106.146471154" watchObservedRunningTime="2025-12-03 16:20:09.227648984 +0000 UTC m=+106.146985437" Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.531655 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:09 crc kubenswrapper[4768]: E1203 16:20:09.531963 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.532028 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.532267 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:09 crc kubenswrapper[4768]: I1203 16:20:09.532355 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:09 crc kubenswrapper[4768]: E1203 16:20:09.532817 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:09 crc kubenswrapper[4768]: E1203 16:20:09.533641 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:09 crc kubenswrapper[4768]: E1203 16:20:09.533924 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:10 crc kubenswrapper[4768]: I1203 16:20:10.531472 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:20:10 crc kubenswrapper[4768]: E1203 16:20:10.531688 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:20:11 crc kubenswrapper[4768]: I1203 16:20:11.531148 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:11 crc kubenswrapper[4768]: I1203 16:20:11.531240 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:11 crc kubenswrapper[4768]: I1203 16:20:11.531127 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:11 crc kubenswrapper[4768]: I1203 16:20:11.531351 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:11 crc kubenswrapper[4768]: E1203 16:20:11.531493 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:11 crc kubenswrapper[4768]: E1203 16:20:11.531761 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:11 crc kubenswrapper[4768]: E1203 16:20:11.531862 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:11 crc kubenswrapper[4768]: E1203 16:20:11.532061 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:13 crc kubenswrapper[4768]: I1203 16:20:13.531425 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:13 crc kubenswrapper[4768]: I1203 16:20:13.531621 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:13 crc kubenswrapper[4768]: E1203 16:20:13.533958 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:13 crc kubenswrapper[4768]: I1203 16:20:13.534142 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:13 crc kubenswrapper[4768]: E1203 16:20:13.534292 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:13 crc kubenswrapper[4768]: I1203 16:20:13.534156 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:13 crc kubenswrapper[4768]: E1203 16:20:13.534639 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:13 crc kubenswrapper[4768]: E1203 16:20:13.534799 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:15 crc kubenswrapper[4768]: I1203 16:20:15.531220 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:15 crc kubenswrapper[4768]: I1203 16:20:15.531533 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:15 crc kubenswrapper[4768]: I1203 16:20:15.531546 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:15 crc kubenswrapper[4768]: E1203 16:20:15.531696 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:15 crc kubenswrapper[4768]: E1203 16:20:15.531834 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:15 crc kubenswrapper[4768]: E1203 16:20:15.531922 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:15 crc kubenswrapper[4768]: I1203 16:20:15.532674 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:15 crc kubenswrapper[4768]: E1203 16:20:15.532872 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:17 crc kubenswrapper[4768]: I1203 16:20:17.530879 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:17 crc kubenswrapper[4768]: E1203 16:20:17.531140 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:17 crc kubenswrapper[4768]: I1203 16:20:17.531443 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:17 crc kubenswrapper[4768]: E1203 16:20:17.531539 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:17 crc kubenswrapper[4768]: I1203 16:20:17.531817 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:17 crc kubenswrapper[4768]: E1203 16:20:17.531950 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:17 crc kubenswrapper[4768]: I1203 16:20:17.531837 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:17 crc kubenswrapper[4768]: E1203 16:20:17.532120 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:19 crc kubenswrapper[4768]: I1203 16:20:19.530703 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:19 crc kubenswrapper[4768]: I1203 16:20:19.530734 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:19 crc kubenswrapper[4768]: I1203 16:20:19.530996 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:19 crc kubenswrapper[4768]: E1203 16:20:19.531136 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:19 crc kubenswrapper[4768]: E1203 16:20:19.531292 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:19 crc kubenswrapper[4768]: E1203 16:20:19.531581 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:19 crc kubenswrapper[4768]: I1203 16:20:19.531665 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:19 crc kubenswrapper[4768]: E1203 16:20:19.531869 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.258275 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/1.log" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.258991 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/0.log" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.259048 4768 generic.go:334] "Generic (PLEG): container finished" podID="d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9" containerID="eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709" exitCode=1 Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.259086 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerDied","Data":"eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709"} Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.259137 4768 scope.go:117] "RemoveContainer" containerID="8a4997668a936b9b57bcbe3f53d8b660547b6c0e6457eeae8dcf00218994d255" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.259933 4768 scope.go:117] "RemoveContainer" containerID="eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.260287 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8d4dq_openshift-multus(d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9)\"" pod="openshift-multus/multus-8d4dq" podUID="d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9" Dec 03 16:20:21 crc 
kubenswrapper[4768]: I1203 16:20:21.531389 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.531587 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.532214 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.532504 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.532652 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.532949 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:21 crc kubenswrapper[4768]: I1203 16:20:21.533297 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.533537 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5z68m_openshift-ovn-kubernetes(29dd67f1-08a6-43ed-840d-cf4b166d5664)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.533543 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:21 crc kubenswrapper[4768]: E1203 16:20:21.533986 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:22 crc kubenswrapper[4768]: I1203 16:20:22.264926 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/1.log" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.506745 4768 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 03 16:20:23 crc kubenswrapper[4768]: I1203 16:20:23.531445 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:23 crc kubenswrapper[4768]: I1203 16:20:23.531487 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.533040 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:23 crc kubenswrapper[4768]: I1203 16:20:23.533091 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.533220 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.533236 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:23 crc kubenswrapper[4768]: I1203 16:20:23.533127 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.533380 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:23 crc kubenswrapper[4768]: E1203 16:20:23.640339 4768 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 03 16:20:25 crc kubenswrapper[4768]: I1203 16:20:25.531357 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:25 crc kubenswrapper[4768]: I1203 16:20:25.531518 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:25 crc kubenswrapper[4768]: I1203 16:20:25.531832 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:25 crc kubenswrapper[4768]: E1203 16:20:25.531810 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:25 crc kubenswrapper[4768]: I1203 16:20:25.531876 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:25 crc kubenswrapper[4768]: E1203 16:20:25.531975 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:25 crc kubenswrapper[4768]: E1203 16:20:25.532156 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:25 crc kubenswrapper[4768]: E1203 16:20:25.532461 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:27 crc kubenswrapper[4768]: I1203 16:20:27.530766 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:27 crc kubenswrapper[4768]: I1203 16:20:27.530869 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:27 crc kubenswrapper[4768]: I1203 16:20:27.530847 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:27 crc kubenswrapper[4768]: I1203 16:20:27.530812 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:27 crc kubenswrapper[4768]: E1203 16:20:27.531051 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:27 crc kubenswrapper[4768]: E1203 16:20:27.531258 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:27 crc kubenswrapper[4768]: E1203 16:20:27.531532 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:27 crc kubenswrapper[4768]: E1203 16:20:27.531589 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:28 crc kubenswrapper[4768]: E1203 16:20:28.642295 4768 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:20:29 crc kubenswrapper[4768]: I1203 16:20:29.537840 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:29 crc kubenswrapper[4768]: E1203 16:20:29.538443 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:29 crc kubenswrapper[4768]: I1203 16:20:29.537907 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:29 crc kubenswrapper[4768]: E1203 16:20:29.538547 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:29 crc kubenswrapper[4768]: I1203 16:20:29.537908 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:29 crc kubenswrapper[4768]: E1203 16:20:29.538648 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:29 crc kubenswrapper[4768]: I1203 16:20:29.537884 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:29 crc kubenswrapper[4768]: E1203 16:20:29.538728 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:31 crc kubenswrapper[4768]: I1203 16:20:31.530664 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:31 crc kubenswrapper[4768]: I1203 16:20:31.530881 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:31 crc kubenswrapper[4768]: I1203 16:20:31.530891 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:31 crc kubenswrapper[4768]: I1203 16:20:31.530942 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:31 crc kubenswrapper[4768]: E1203 16:20:31.531079 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:31 crc kubenswrapper[4768]: I1203 16:20:31.531127 4768 scope.go:117] "RemoveContainer" containerID="eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709" Dec 03 16:20:31 crc kubenswrapper[4768]: E1203 16:20:31.531338 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:31 crc kubenswrapper[4768]: E1203 16:20:31.531476 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:31 crc kubenswrapper[4768]: E1203 16:20:31.531668 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.318233 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/1.log" Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.318312 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerStarted","Data":"3a6bdd46568d340f1950afc6f2b9373a8d008c82e8121d47a8c36dd53954e582"} Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.531167 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.531240 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.531240 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:33 crc kubenswrapper[4768]: E1203 16:20:33.533093 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:33 crc kubenswrapper[4768]: E1203 16:20:33.534154 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:33 crc kubenswrapper[4768]: I1203 16:20:33.533126 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:33 crc kubenswrapper[4768]: E1203 16:20:33.534394 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:33 crc kubenswrapper[4768]: E1203 16:20:33.535234 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:33 crc kubenswrapper[4768]: E1203 16:20:33.643388 4768 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:20:34 crc kubenswrapper[4768]: I1203 16:20:34.531984 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.332036 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/3.log" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.337005 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerStarted","Data":"bb808d15e9e16ea9966c9c40bc58f993c43fa774e04642b783f3888bb420c016"} Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.337684 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.391662 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podStartSLOduration=109.391643095 podStartE2EDuration="1m49.391643095s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:35.389951399 +0000 UTC m=+132.309287872" watchObservedRunningTime="2025-12-03 16:20:35.391643095 +0000 UTC m=+132.310979538" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.531198 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.531265 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.531198 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.531222 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:35 crc kubenswrapper[4768]: E1203 16:20:35.531409 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:35 crc kubenswrapper[4768]: E1203 16:20:35.531492 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:35 crc kubenswrapper[4768]: E1203 16:20:35.531720 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:35 crc kubenswrapper[4768]: E1203 16:20:35.531793 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:35 crc kubenswrapper[4768]: I1203 16:20:35.959499 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j25k6"] Dec 03 16:20:36 crc kubenswrapper[4768]: I1203 16:20:36.342273 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:36 crc kubenswrapper[4768]: E1203 16:20:36.342476 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:37 crc kubenswrapper[4768]: I1203 16:20:37.531219 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:37 crc kubenswrapper[4768]: I1203 16:20:37.531297 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:37 crc kubenswrapper[4768]: I1203 16:20:37.531369 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:37 crc kubenswrapper[4768]: E1203 16:20:37.531553 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:37 crc kubenswrapper[4768]: I1203 16:20:37.531896 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:37 crc kubenswrapper[4768]: E1203 16:20:37.531995 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:37 crc kubenswrapper[4768]: E1203 16:20:37.532219 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:37 crc kubenswrapper[4768]: E1203 16:20:37.532392 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:38 crc kubenswrapper[4768]: E1203 16:20:38.645640 4768 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:20:39 crc kubenswrapper[4768]: I1203 16:20:39.531471 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:39 crc kubenswrapper[4768]: I1203 16:20:39.531518 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:39 crc kubenswrapper[4768]: E1203 16:20:39.531692 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:39 crc kubenswrapper[4768]: I1203 16:20:39.531965 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:39 crc kubenswrapper[4768]: E1203 16:20:39.532072 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:39 crc kubenswrapper[4768]: E1203 16:20:39.532336 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:39 crc kubenswrapper[4768]: I1203 16:20:39.532564 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:39 crc kubenswrapper[4768]: E1203 16:20:39.532699 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:41 crc kubenswrapper[4768]: I1203 16:20:41.531652 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:41 crc kubenswrapper[4768]: I1203 16:20:41.531712 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:41 crc kubenswrapper[4768]: I1203 16:20:41.531721 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:41 crc kubenswrapper[4768]: E1203 16:20:41.531850 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:41 crc kubenswrapper[4768]: I1203 16:20:41.531887 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:41 crc kubenswrapper[4768]: E1203 16:20:41.532060 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:41 crc kubenswrapper[4768]: E1203 16:20:41.532196 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:41 crc kubenswrapper[4768]: E1203 16:20:41.532321 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:42 crc kubenswrapper[4768]: I1203 16:20:42.395467 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:20:43 crc kubenswrapper[4768]: I1203 16:20:43.531513 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:43 crc kubenswrapper[4768]: E1203 16:20:43.533046 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 03 16:20:43 crc kubenswrapper[4768]: I1203 16:20:43.533068 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:43 crc kubenswrapper[4768]: E1203 16:20:43.533192 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 03 16:20:43 crc kubenswrapper[4768]: I1203 16:20:43.533218 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:43 crc kubenswrapper[4768]: E1203 16:20:43.533446 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 03 16:20:43 crc kubenswrapper[4768]: I1203 16:20:43.533741 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:43 crc kubenswrapper[4768]: E1203 16:20:43.533937 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-j25k6" podUID="772886ae-dcfc-418e-ac82-49d7844c99f1" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.531429 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.531504 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.531509 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.531681 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535184 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535212 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535283 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535429 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535528 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 03 16:20:45 crc kubenswrapper[4768]: I1203 16:20:45.535726 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.779586 4768 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.836949 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-45qqx"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.837767 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.841159 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.842388 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.852402 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.852702 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.852838 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.852723 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.853861 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.853891 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.854288 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.854363 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-ncgcs"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.870247 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-ncgcs" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.875212 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-48hng"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.876100 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.881399 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.881872 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.883151 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.883246 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.889585 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.896006 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.896193 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.896281 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.896791 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.898650 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.898904 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.899490 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.900997 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.902661 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.907609 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.907769 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.907943 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.907959 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908061 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908068 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908064 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908342 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908441 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908539 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908782 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.908929 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.914813 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tfmcd"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.915661 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.916925 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-64v26"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.917311 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.917394 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.917985 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.919714 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29lwk"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.920139 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926020 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c12dd59-a489-414b-ae4b-670d3d4d4359-serving-cert\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926068 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zhhm\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-kube-api-access-8zhhm\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926099 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rp7j8\" (UniqueName: \"kubernetes.io/projected/1420fe53-4382-42b7-a458-a21faa50b858-kube-api-access-rp7j8\") pod \"downloads-7954f5f757-ncgcs\" (UID: \"1420fe53-4382-42b7-a458-a21faa50b858\") " pod="openshift-console/downloads-7954f5f757-ncgcs" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926126 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t72rm\" (UniqueName: \"kubernetes.io/projected/3c12dd59-a489-414b-ae4b-670d3d4d4359-kube-api-access-t72rm\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926147 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/78eaee69-d19e-4eb8-a6f1-317434219c61-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926171 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926191 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78eaee69-d19e-4eb8-a6f1-317434219c61-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926220 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-service-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926245 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-config\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.926366 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.927968 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.928452 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.935558 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936016 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936300 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936424 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936450 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936552 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936662 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.937258 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936676 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936718 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.937760 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.937939 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936726 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936775 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936816 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.936861 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.938223 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.938268 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.938618 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.938634 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.938645 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.941686 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.942629 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.942815 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.943313 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.943323 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.943660 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.943867 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.956828 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.957194 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.957392 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.958657 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.959433 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.959724 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.959724 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.962184 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.964360 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.964403 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.965131 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qwp9t"] Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.965290 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.974410 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.974907 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.975371 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.975560 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.975771 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.976305 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:48 crc kubenswrapper[4768]: I1203 16:20:48.976400 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dlkmg"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.002847 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.005106 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.005941 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-428d6"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.006457 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.005989 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.006457 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007356 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.006063 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.006586 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.006850 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007542 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007002 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007109 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007719 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007121 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007755 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007145 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007186 
4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007870 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.007915 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008197 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008299 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008526 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008644 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008737 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.008856 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009017 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009069 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009192 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009289 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009315 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.009803 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011094 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011296 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011424 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011532 4768 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011785 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.011847 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.012146 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.012652 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.012714 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.016155 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.016436 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.018450 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.019217 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.021144 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.021294 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hd7sc"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.021573 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.021897 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.022331 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.022911 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-45qqx"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.023218 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.023344 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hd7sc"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.024031 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.025657 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026839 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cadb4efb-b28b-43fc-883f-6cf96d18af72-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026869 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026886 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026901 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026919 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026934 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-auth-proxy-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026950 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026965 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svxkg\" (UniqueName: \"kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.026988 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/78eaee69-d19e-4eb8-a6f1-317434219c61-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027006 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79abfb36-1dff-45e9-a2a7-f463284deffb-config\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027023 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027040 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027056 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027071 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp5gr\" (UniqueName: \"kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027086 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbg2b\" (UniqueName: \"kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027102 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3c0620ad-c62a-47b1-9044-ed61241e4a39-available-featuregates\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027119 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027173 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76323600-acfd-400b-b803-b3a05c114209-config\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027190 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027206 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78eaee69-d19e-4eb8-a6f1-317434219c61-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027222 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-policies\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027264 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027281 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027297 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnbgz\" (UniqueName: \"kubernetes.io/projected/3c0620ad-c62a-47b1-9044-ed61241e4a39-kube-api-access-mnbgz\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027315 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/76323600-acfd-400b-b803-b3a05c114209-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4901966-da0a-4bdf-ad9a-126056e3cbbf-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027347 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027362 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9a95c84f-467d-4eaf-ad33-504a4d2661c0-machine-approver-tls\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027377 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-client\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027394 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027412 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjxjw\" (UniqueName: \"kubernetes.io/projected/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-kube-api-access-bjxjw\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027425 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027446 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027461 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-config\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027483 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-client\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027496 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027514 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027528 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027549 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-serving-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027572 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c12dd59-a489-414b-ae4b-670d3d4d4359-serving-cert\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027589 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97hnr\" (UniqueName: \"kubernetes.io/projected/cadb4efb-b28b-43fc-883f-6cf96d18af72-kube-api-access-97hnr\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027618 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76323600-acfd-400b-b803-b3a05c114209-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027633 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-779cz\" (UniqueName: \"kubernetes.io/projected/a4d07043-2618-451f-94a2-84f34aefb6ce-kube-api-access-779cz\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027649 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027663 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm6dn\" (UniqueName: \"kubernetes.io/projected/9a95c84f-467d-4eaf-ad33-504a4d2661c0-kube-api-access-cm6dn\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027680 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rp7j8\" (UniqueName: \"kubernetes.io/projected/1420fe53-4382-42b7-a458-a21faa50b858-kube-api-access-rp7j8\") pod \"downloads-7954f5f757-ncgcs\" (UID: \"1420fe53-4382-42b7-a458-a21faa50b858\") " pod="openshift-console/downloads-7954f5f757-ncgcs"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027696 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-client\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027711 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-serving-cert\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027725 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfsgx\" (UniqueName: \"kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027741 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/79abfb36-1dff-45e9-a2a7-f463284deffb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027756 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027772 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t72rm\" (UniqueName: \"kubernetes.io/projected/3c12dd59-a489-414b-ae4b-670d3d4d4359-kube-api-access-t72rm\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027788 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-encryption-config\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027801 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027814 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027830 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqm5d\" (UniqueName: \"kubernetes.io/projected/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-kube-api-access-kqm5d\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027846 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbc6j\" (UniqueName: \"kubernetes.io/projected/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-kube-api-access-nbc6j\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027860 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027875 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-serving-cert\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027888 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027901 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027917 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027930 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit-dir\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027959 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027978 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-config\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.027994 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-service-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028008 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028024 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028038 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028053 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z826n\" (UniqueName: \"kubernetes.io/projected/f8be940a-94e8-4660-90d1-810511203a96-kube-api-access-z826n\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028069 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-node-pullsecrets\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028084 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-config\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028108 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-images\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028123 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-serving-cert\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028138 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt265\" (UniqueName: \"kubernetes.io/projected/f4901966-da0a-4bdf-ad9a-126056e3cbbf-kube-api-access-pt265\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028155 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028169 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4901966-da0a-4bdf-ad9a-126056e3cbbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028185 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-dir\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028199 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028214 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028236 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-image-import-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028250 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79abfb36-1dff-45e9-a2a7-f463284deffb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028265 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8be940a-94e8-4660-90d1-810511203a96-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028280 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-encryption-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028293 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c0620ad-c62a-47b1-9044-ed61241e4a39-serving-cert\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028307 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028323 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zhhm\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-kube-api-access-8zhhm\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028338 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028357 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-service-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.028376 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.029828 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/78eaee69-d19e-4eb8-a6f1-317434219c61-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.030047 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.030137 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-service-ca-bundle\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.030356 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3c12dd59-a489-414b-ae4b-670d3d4d4359-config\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.031155 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.031408 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.031745 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.047045 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.049432 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.049705 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.047880 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/78eaee69-d19e-4eb8-a6f1-317434219c61-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.050047 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.050333 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c12dd59-a489-414b-ae4b-670d3d4d4359-serving-cert\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.050070 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.051361 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.051927 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.052842 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.058547 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.059721 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.061147 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.061340 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.061750 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.062985 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.063795 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.065322 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.065831 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.065429 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.066438 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.068094 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.068765 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.069396 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.069780 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.070611 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.070750 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.071436 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.071939 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-q8rgn"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.072369 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.075112 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.076054 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.077078 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.079655 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.079965 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-64v26"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.081227 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.083400 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.085758 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.090296 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.093108 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.095426 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dlkmg"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.096612 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.100668 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-48hng"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.101179 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-p9rk5"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.101821 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ncgcs"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.101925 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-p9rk5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.104942 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.109680 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-57fjg"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.110407 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qwp9t"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.110539 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-57fjg"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.112053 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.113471 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.116517 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.116565 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.121577 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.121641 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.121653 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.125514 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.125518 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tfmcd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.125878 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29lwk"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.126810 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.128230 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129038 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-stats-auth\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129143 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129210 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"]
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129218 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129288 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.129969 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130014 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130039 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-default-certificate\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130059 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfshv\" (UniqueName: \"kubernetes.io/projected/4fea816f-6c5d-4cb7-89e9-e40a95994f27-kube-api-access-bfshv\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130079 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79abfb36-1dff-45e9-a2a7-f463284deffb-config\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130099 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130119 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbg2b\" (UniqueName: \"kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130141 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130160 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130177 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130197 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-policies\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130213 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130236 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnbgz\" (UniqueName: \"kubernetes.io/projected/3c0620ad-c62a-47b1-9044-ed61241e4a39-kube-api-access-mnbgz\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130258 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/76323600-acfd-400b-b803-b3a05c114209-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130279 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjxjw\" (UniqueName: \"kubernetes.io/projected/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-kube-api-access-bjxjw\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130297 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spj74\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-kube-api-access-spj74\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130314 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-config\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130343 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-images\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130358 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a670660-f7f7-42af-91b1-b39b29b3d182-service-ca-bundle\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130377 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130392 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130418 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130435 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130452 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm6dn\" (UniqueName: \"kubernetes.io/projected/9a95c84f-467d-4eaf-ad33-504a4d2661c0-kube-api-access-cm6dn\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130469 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130487 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkglq\" (UniqueName: \"kubernetes.io/projected/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-kube-api-access-hkglq\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130502 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtfcs\" (UniqueName: \"kubernetes.io/projected/1a670660-f7f7-42af-91b1-b39b29b3d182-kube-api-access-vtfcs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130537 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-client\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130555 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-serving-cert\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130571 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfsgx\" (UniqueName: \"kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130587 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/79abfb36-1dff-45e9-a2a7-f463284deffb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130624 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-encryption-config\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130641 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqm5d\" (UniqueName: \"kubernetes.io/projected/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-kube-api-access-kqm5d\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130656 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-serving-cert\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130672 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130689 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130707 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z826n\" (UniqueName: \"kubernetes.io/projected/f8be940a-94e8-4660-90d1-810511203a96-kube-api-access-z826n\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") "
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130722 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-config\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130745 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt265\" (UniqueName: \"kubernetes.io/projected/f4901966-da0a-4bdf-ad9a-126056e3cbbf-kube-api-access-pt265\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130762 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4901966-da0a-4bdf-ad9a-126056e3cbbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130780 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130798 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8be940a-94e8-4660-90d1-810511203a96-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130818 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79abfb36-1dff-45e9-a2a7-f463284deffb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130835 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130876 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3c0620ad-c62a-47b1-9044-ed61241e4a39-serving-cert\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: 
\"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130896 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130912 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130930 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e2b9b0b-26da-45b0-b434-13aebc027dae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130948 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130964 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130982 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cadb4efb-b28b-43fc-883f-6cf96d18af72-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130998 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131015 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131032 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-auth-proxy-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131046 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svxkg\" (UniqueName: \"kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131062 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131068 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79abfb36-1dff-45e9-a2a7-f463284deffb-config\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131079 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131139 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-serving-cert\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131178 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp5gr\" (UniqueName: \"kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:49 crc 
kubenswrapper[4768]: I1203 16:20:49.131209 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131262 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3c0620ad-c62a-47b1-9044-ed61241e4a39-available-featuregates\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131280 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76323600-acfd-400b-b803-b3a05c114209-config\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131298 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm7cv\" (UniqueName: \"kubernetes.io/projected/8e2b9b0b-26da-45b0-b434-13aebc027dae-kube-api-access-dm7cv\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131318 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131333 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6jhn\" (UniqueName: \"kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131348 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131369 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-metrics-certs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131388 4768 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4901966-da0a-4bdf-ad9a-126056e3cbbf-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131409 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131431 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9a95c84f-467d-4eaf-ad33-504a4d2661c0-machine-approver-tls\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131462 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-client\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131485 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131507 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4fea816f-6c5d-4cb7-89e9-e40a95994f27-metrics-tls\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131558 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e2b9b0b-26da-45b0-b434-13aebc027dae-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131567 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131613 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131655 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-proxy-tls\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131661 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hd7sc"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131689 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-client\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131716 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131744 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-trusted-ca\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131768 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-serving-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131814 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97hnr\" (UniqueName: \"kubernetes.io/projected/cadb4efb-b28b-43fc-883f-6cf96d18af72-kube-api-access-97hnr\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.131895 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76323600-acfd-400b-b803-b3a05c114209-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-779cz\" (UniqueName: \"kubernetes.io/projected/a4d07043-2618-451f-94a2-84f34aefb6ce-kube-api-access-779cz\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132441 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9b7x\" (UniqueName: \"kubernetes.io/projected/8443465b-6aa3-4c6c-8c57-035b962770b7-kube-api-access-x9b7x\") pod \"migrator-59844c95c7-k8zlz\" (UID: \"8443465b-6aa3-4c6c-8c57-035b962770b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132468 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-config\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132531 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f554q\" (UniqueName: \"kubernetes.io/projected/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-kube-api-access-f554q\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132563 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132616 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132650 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bac0b68c-760d-404e-a211-aebd21955996-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132681 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbc6j\" (UniqueName: \"kubernetes.io/projected/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-kube-api-access-nbc6j\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" Dec 03 
16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132716 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132751 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzv9c\" (UniqueName: \"kubernetes.io/projected/6f75f0a9-fec3-41c1-bda0-be2ef7485043-kube-api-access-fzv9c\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132790 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132827 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132857 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit-dir\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132900 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132921 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.132935 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133049 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133152 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-node-pullsecrets\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133186 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-images\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133248 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-serving-cert\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133272 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133369 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133431 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-dir\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133453 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133502 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133528 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-image-import-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133546 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-encryption-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133570 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qtfq\" (UniqueName: \"kubernetes.io/projected/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-kube-api-access-4qtfq\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.133925 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-dir\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134042 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-node-pullsecrets\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134689 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3c0620ad-c62a-47b1-9044-ed61241e4a39-available-featuregates\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134755 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134777 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-service-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134798 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bac0b68c-760d-404e-a211-aebd21955996-proxy-tls\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.134818 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9brdp\" (UniqueName: \"kubernetes.io/projected/bac0b68c-760d-404e-a211-aebd21955996-kube-api-access-9brdp\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.135056 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-images\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.135227 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.135362 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.136218 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.136860 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-auth-proxy-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.137744 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.137750 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cadb4efb-b28b-43fc-883f-6cf96d18af72-config\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.137977 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4901966-da0a-4bdf-ad9a-126056e3cbbf-config\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.138079 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.138999 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-encryption-config\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.139223 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.139269 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.139903 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.140110 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.140627 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-image-import-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.140689 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.148886 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " 
pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.140855 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4901966-da0a-4bdf-ad9a-126056e3cbbf-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.141148 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-audit-policies\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.141161 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-serving-cert\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.130209 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.141546 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-trusted-ca-bundle\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.142097 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.142422 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-client\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149151 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert\") pod \"controller-manager-879f6c89f-dngkv\" (UID: 
\"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.142641 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.143235 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-etcd-serving-ca\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.143200 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.143517 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.146325 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/79abfb36-1dff-45e9-a2a7-f463284deffb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.146716 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147000 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a95c84f-467d-4eaf-ad33-504a4d2661c0-config\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147183 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-audit-dir\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147230 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/f8be940a-94e8-4660-90d1-810511203a96-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.147928 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76323600-acfd-400b-b803-b3a05c114209-config\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149382 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/9a95c84f-467d-4eaf-ad33-504a4d2661c0-machine-approver-tls\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.148728 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-serving-cert\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.140778 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-config\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149684 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149772 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149900 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.149999 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-encryption-config\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-serving-cert\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150168 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150176 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150517 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150555 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cadb4efb-b28b-43fc-883f-6cf96d18af72-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.150765 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-client\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.143234 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3c0620ad-c62a-47b1-9044-ed61241e4a39-serving-cert\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.151140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.151236 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/a4d07043-2618-451f-94a2-84f34aefb6ce-etcd-service-ca\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.153207 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.153496 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.153746 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-etcd-client\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.156934 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-v24rm"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.158154 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.158723 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.158963 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.160539 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/76323600-acfd-400b-b803-b3a05c114209-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.160874 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.161008 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.161765 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngrx8"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.166105 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.166328 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.166765 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-p9rk5"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.170143 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.171807 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.172133 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.173176 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-v24rm"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.174456 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-q8rgn"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.175460 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngrx8"] Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.179828 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.199728 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.219335 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236171 4768 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-serving-cert\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236223 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236253 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm7cv\" (UniqueName: \"kubernetes.io/projected/8e2b9b0b-26da-45b0-b434-13aebc027dae-kube-api-access-dm7cv\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236281 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6jhn\" (UniqueName: \"kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236306 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236334 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-metrics-certs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236362 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-proxy-tls\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236382 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4fea816f-6c5d-4cb7-89e9-e40a95994f27-metrics-tls\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236404 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e2b9b0b-26da-45b0-b434-13aebc027dae-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236435 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-trusted-ca\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236467 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9b7x\" (UniqueName: \"kubernetes.io/projected/8443465b-6aa3-4c6c-8c57-035b962770b7-kube-api-access-x9b7x\") pod \"migrator-59844c95c7-k8zlz\" (UID: \"8443465b-6aa3-4c6c-8c57-035b962770b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236521 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-config\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236556 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f554q\" (UniqueName: \"kubernetes.io/projected/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-kube-api-access-f554q\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236589 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bac0b68c-760d-404e-a211-aebd21955996-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236650 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzv9c\" (UniqueName: \"kubernetes.io/projected/6f75f0a9-fec3-41c1-bda0-be2ef7485043-kube-api-access-fzv9c\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236693 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236737 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qtfq\" (UniqueName: \"kubernetes.io/projected/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-kube-api-access-4qtfq\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: 
\"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236766 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bac0b68c-760d-404e-a211-aebd21955996-proxy-tls\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236803 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9brdp\" (UniqueName: \"kubernetes.io/projected/bac0b68c-760d-404e-a211-aebd21955996-kube-api-access-9brdp\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236832 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236862 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236890 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-stats-auth\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236925 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-default-certificate\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236956 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfshv\" (UniqueName: \"kubernetes.io/projected/4fea816f-6c5d-4cb7-89e9-e40a95994f27-kube-api-access-bfshv\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.236983 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:49 crc 
kubenswrapper[4768]: I1203 16:20:49.237152 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spj74\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-kube-api-access-spj74\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237232 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-images\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237257 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a670660-f7f7-42af-91b1-b39b29b3d182-service-ca-bundle\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237309 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237338 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkglq\" (UniqueName: \"kubernetes.io/projected/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-kube-api-access-hkglq\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237363 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237387 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237454 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtfcs\" (UniqueName: \"kubernetes.io/projected/1a670660-f7f7-42af-91b1-b39b29b3d182-kube-api-access-vtfcs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237553 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237658 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237694 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.237722 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e2b9b0b-26da-45b0-b434-13aebc027dae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.238143 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.239335 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bac0b68c-760d-404e-a211-aebd21955996-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.239509 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.241251 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4fea816f-6c5d-4cb7-89e9-e40a95994f27-metrics-tls\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.260504 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.281896 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.300683 4768 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-stats-default" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.310728 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-stats-auth\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.320229 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.328381 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1a670660-f7f7-42af-91b1-b39b29b3d182-service-ca-bundle\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.341378 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.353584 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-metrics-certs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.360542 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.379694 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.401674 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.412141 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/1a670660-f7f7-42af-91b1-b39b29b3d182-default-certificate\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.420011 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.439535 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.461457 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.501699 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.520215 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.539704 4768 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.552838 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.560134 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.568755 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.579317 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.589247 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e2b9b0b-26da-45b0-b434-13aebc027dae-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.599913 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.621319 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.632276 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e2b9b0b-26da-45b0-b434-13aebc027dae-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.641020 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.642510 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:49 crc kubenswrapper[4768]: E1203 16:20:49.642758 4768 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:22:51.642728123 +0000 UTC m=+268.562064586 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.643044 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.643330 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.643395 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.645099 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.648915 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.648960 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.660303 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc 
kubenswrapper[4768]: I1203 16:20:49.679794 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.687868 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-config\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.700289 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.721169 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.740400 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.745682 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.751101 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.752882 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-serving-cert\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.773239 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.773383 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.779767 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-trusted-ca\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.781725 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.788967 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.802007 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.827095 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.853457 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zhhm\" (UniqueName: \"kubernetes.io/projected/78eaee69-d19e-4eb8-a6f1-317434219c61-kube-api-access-8zhhm\") pod \"cluster-image-registry-operator-dc59b4c8b-rvgw8\" (UID: \"78eaee69-d19e-4eb8-a6f1-317434219c61\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.867927 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t72rm\" (UniqueName: \"kubernetes.io/projected/3c12dd59-a489-414b-ae4b-670d3d4d4359-kube-api-access-t72rm\") pod \"authentication-operator-69f744f599-45qqx\" (UID: \"3c12dd59-a489-414b-ae4b-670d3d4d4359\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.881013 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.881716 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rp7j8\" (UniqueName: \"kubernetes.io/projected/1420fe53-4382-42b7-a458-a21faa50b858-kube-api-access-rp7j8\") pod \"downloads-7954f5f757-ncgcs\" (UID: \"1420fe53-4382-42b7-a458-a21faa50b858\") " pod="openshift-console/downloads-7954f5f757-ncgcs" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.909362 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.955460 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.956013 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.960174 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 16:20:49 crc kubenswrapper[4768]: I1203 16:20:49.981695 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.003706 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.016649 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.028230 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.049238 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.053461 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.058887 4768 request.go:700] Waited for 1.002434511s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-dockercfg-5nsgg&limit=500&resourceVersion=0 Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.062906 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.080020 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.082255 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.095746 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.101130 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.117063 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-ncgcs" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.124557 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.140160 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 03 16:20:50 crc kubenswrapper[4768]: W1203 16:20:50.151297 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-911d9d339cb9a43f5839b842cde6e4898f1ce4102080a8e3534803c1ee5ef876 WatchSource:0}: Error finding container 911d9d339cb9a43f5839b842cde6e4898f1ce4102080a8e3534803c1ee5ef876: Status 404 returned error can't find the container with id 911d9d339cb9a43f5839b842cde6e4898f1ce4102080a8e3534803c1ee5ef876 Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.160321 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.175231 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/bac0b68c-760d-404e-a211-aebd21955996-proxy-tls\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.179991 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.203759 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.208111 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-images\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.220149 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.230749 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-proxy-tls\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.237275 4768 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.237358 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert podName:e0d3976c-fbd4-467f-bd38-5f9131f81ea7 nodeName:}" failed. 
No retries permitted until 2025-12-03 16:20:50.73733896 +0000 UTC m=+147.656675383 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert") pod "olm-operator-6b444d44fb-hhzd9" (UID: "e0d3976c-fbd4-467f-bd38-5f9131f81ea7") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.237655 4768 secret.go:188] Couldn't get secret openshift-ingress-operator/metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.237687 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls podName:1ed4ae0c-05b0-4b1f-82f6-876a922ee953 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:50.737677241 +0000 UTC m=+147.657013664 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls") pod "ingress-operator-5b745b69d9-pww5n" (UID: "1ed4ae0c-05b0-4b1f-82f6-876a922ee953") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.238784 4768 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.238865 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert podName:e0d3976c-fbd4-467f-bd38-5f9131f81ea7 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:50.7388414 +0000 UTC m=+147.658177833 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert") pod "olm-operator-6b444d44fb-hhzd9" (UID: "e0d3976c-fbd4-467f-bd38-5f9131f81ea7") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.240652 4768 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.240668 4768 configmap.go:193] Couldn't get configMap openshift-ingress-operator/trusted-ca: failed to sync configmap cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.240696 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs podName:6f75f0a9-fec3-41c1-bda0-be2ef7485043 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:50.740686041 +0000 UTC m=+147.660022464 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs") pod "multus-admission-controller-857f4d67dd-kwbcn" (UID: "6f75f0a9-fec3-41c1-bda0-be2ef7485043") : failed to sync secret cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: E1203 16:20:50.240714 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca podName:1ed4ae0c-05b0-4b1f-82f6-876a922ee953 nodeName:}" failed. No retries permitted until 2025-12-03 16:20:50.740703692 +0000 UTC m=+147.660040115 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca" (UniqueName: "kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca") pod "ingress-operator-5b745b69d9-pww5n" (UID: "1ed4ae0c-05b0-4b1f-82f6-876a922ee953") : failed to sync configmap cache: timed out waiting for the condition Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.242671 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.260698 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.281127 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.307582 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.313430 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8"] Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.321306 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 16:20:50 crc kubenswrapper[4768]: W1203 16:20:50.328481 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-36ae666e8b8693e24dde7df56411763d8a664aa0b5962d0729f04853105996d2 WatchSource:0}: Error finding container 36ae666e8b8693e24dde7df56411763d8a664aa0b5962d0729f04853105996d2: Status 404 returned error can't find the container with id 36ae666e8b8693e24dde7df56411763d8a664aa0b5962d0729f04853105996d2 Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.344442 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.357716 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-ncgcs"] Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.360138 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.360852 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-45qqx"] Dec 03 16:20:50 crc kubenswrapper[4768]: W1203 16:20:50.365404 4768 manager.go:1169] Failed to process watch 
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.380661 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.400428 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.401135 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" event={"ID":"3c12dd59-a489-414b-ae4b-670d3d4d4359","Type":"ContainerStarted","Data":"7c1f953df030d5b97a102f7db73b348422f70b152ebe1efe87a901d8e699c49b"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.402302 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" event={"ID":"78eaee69-d19e-4eb8-a6f1-317434219c61","Type":"ContainerStarted","Data":"b45d0af79ee4463fd501cda8701dc74059e7eb3ec5a42d8fe21ee296b1ac61d2"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.403321 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"36ae666e8b8693e24dde7df56411763d8a664aa0b5962d0729f04853105996d2"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.404817 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"74cb7a5430f8bcfbfe8332cd9433ce9d30d1c741819681685e2bceed5f6d4360"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.404854 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"911d9d339cb9a43f5839b842cde6e4898f1ce4102080a8e3534803c1ee5ef876"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.405954 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ncgcs" event={"ID":"1420fe53-4382-42b7-a458-a21faa50b858","Type":"ContainerStarted","Data":"547bae28c3122ca47fbe1b9e4f09c2748beaaa468068ba8efe9837015006e430"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.407394 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"189d2d580cd67062b3f936fe1ee65d2aee1075faafb642537a810d6fc34935f6"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.407430 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"80e1860cea417633777a0145e8c512ab24345d004c6a1762f1c9ffc60060fd4f"}
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.407932 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.420265 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.439440 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.459727 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.480767 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.500062 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.521036 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.540349 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.562249 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.582131 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.601474 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.622274 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.642361 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.660992 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.680628 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.707713 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.720754 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.740956 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.759907 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.780051 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.780227 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.780295 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.780370 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.780438 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.781358 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.782200 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-trusted-ca\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.795492 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f75f0a9-fec3-41c1-bda0-be2ef7485043-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.795838 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-metrics-tls\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.796368 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-srv-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.796541 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.821432 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.840662 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.860117 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.901725 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp5gr\" (UniqueName: \"kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr\") pod \"controller-manager-879f6c89f-dngkv\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.924233 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/79abfb36-1dff-45e9-a2a7-f463284deffb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xf7dt\" (UID: \"79abfb36-1dff-45e9-a2a7-f463284deffb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.932450 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.941157 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbg2b\" (UniqueName: \"kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b\") pod \"oauth-openshift-558db77b4-29lwk\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") " pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.966373 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt265\" (UniqueName: \"kubernetes.io/projected/f4901966-da0a-4bdf-ad9a-126056e3cbbf-kube-api-access-pt265\") pod \"openshift-controller-manager-operator-756b6f6bc6-mmfqj\" (UID: \"f4901966-da0a-4bdf-ad9a-126056e3cbbf\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.985290 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqm5d\" (UniqueName: \"kubernetes.io/projected/68c07dfb-be0d-459f-bf5c-e15dc8472ae6-kube-api-access-kqm5d\") pod \"apiserver-7bbb656c7d-knwwz\" (UID: \"68c07dfb-be0d-459f-bf5c-e15dc8472ae6\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:50 crc kubenswrapper[4768]: I1203 16:20:50.999429 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svxkg\" (UniqueName: \"kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg\") pod \"console-f9d7485db-64v26\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.026394 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z826n\" (UniqueName: \"kubernetes.io/projected/f8be940a-94e8-4660-90d1-810511203a96-kube-api-access-z826n\") pod \"cluster-samples-operator-665b6dd947-fzskf\" (UID: \"f8be940a-94e8-4660-90d1-810511203a96\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.062987 4768 request.go:700] Waited for 1.922068986s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-apiserver/serviceaccounts/openshift-apiserver-sa/token
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.064487 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm6dn\" (UniqueName: \"kubernetes.io/projected/9a95c84f-467d-4eaf-ad33-504a4d2661c0-kube-api-access-cm6dn\") pod \"machine-approver-56656f9798-lgng9\" (UID: \"9a95c84f-467d-4eaf-ad33-504a4d2661c0\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.066970 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/76323600-acfd-400b-b803-b3a05c114209-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jgvc5\" (UID: \"76323600-acfd-400b-b803-b3a05c114209\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.082083 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjxjw\" (UniqueName: \"kubernetes.io/projected/b9e45680-e1b1-4cf4-8720-ebdff8a8ef55-kube-api-access-bjxjw\") pod \"apiserver-76f77b778f-tfmcd\" (UID: \"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55\") " pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.102036 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnbgz\" (UniqueName: \"kubernetes.io/projected/3c0620ad-c62a-47b1-9044-ed61241e4a39-kube-api-access-mnbgz\") pod \"openshift-config-operator-7777fb866f-48hng\" (UID: \"3c0620ad-c62a-47b1-9044-ed61241e4a39\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.109755 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.120927 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfsgx\" (UniqueName: \"kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx\") pod \"route-controller-manager-6576b87f9c-mkjqb\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.121429 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-64v26"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.137301 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.141025 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97hnr\" (UniqueName: \"kubernetes.io/projected/cadb4efb-b28b-43fc-883f-6cf96d18af72-kube-api-access-97hnr\") pod \"machine-api-operator-5694c8668f-nqmfz\" (UID: \"cadb4efb-b28b-43fc-883f-6cf96d18af72\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.160987 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-779cz\" (UniqueName: \"kubernetes.io/projected/a4d07043-2618-451f-94a2-84f34aefb6ce-kube-api-access-779cz\") pod \"etcd-operator-b45778765-qwp9t\" (UID: \"a4d07043-2618-451f-94a2-84f34aefb6ce\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.171618 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.180336 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.183780 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbc6j\" (UniqueName: \"kubernetes.io/projected/462653a7-03ca-4a9d-aeb7-476c0a97a5e1-kube-api-access-nbc6j\") pod \"openshift-apiserver-operator-796bbdcf4f-9wkh5\" (UID: \"462653a7-03ca-4a9d-aeb7-476c0a97a5e1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.192328 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"]
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.193985 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.193985 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.207502 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.207803 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.220319 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.225260 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.241410 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.261303 4768 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.280050 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.282172 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.287629 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.320407 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6jhn\" (UniqueName: \"kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn\") pod \"marketplace-operator-79b997595-9zxmd\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.324372 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.333879 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.335459 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm7cv\" (UniqueName: \"kubernetes.io/projected/8e2b9b0b-26da-45b0-b434-13aebc027dae-kube-api-access-dm7cv\") pod \"kube-storage-version-migrator-operator-b67b599dd-vprvd\" (UID: \"8e2b9b0b-26da-45b0-b434-13aebc027dae\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.354130 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.362990 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9b7x\" (UniqueName: \"kubernetes.io/projected/8443465b-6aa3-4c6c-8c57-035b962770b7-kube-api-access-x9b7x\") pod \"migrator-59844c95c7-k8zlz\" (UID: \"8443465b-6aa3-4c6c-8c57-035b962770b7\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.363171 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.379085 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f554q\" (UniqueName: \"kubernetes.io/projected/e0d3976c-fbd4-467f-bd38-5f9131f81ea7-kube-api-access-f554q\") pod \"olm-operator-6b444d44fb-hhzd9\" (UID: \"e0d3976c-fbd4-467f-bd38-5f9131f81ea7\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.416803 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzv9c\" (UniqueName: \"kubernetes.io/projected/6f75f0a9-fec3-41c1-bda0-be2ef7485043-kube-api-access-fzv9c\") pod \"multus-admission-controller-857f4d67dd-kwbcn\" (UID: \"6f75f0a9-fec3-41c1-bda0-be2ef7485043\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.422853 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.424333 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qtfq\" (UniqueName: \"kubernetes.io/projected/3f8bfc8b-163e-4915-b9af-9f4d55e5b16b-kube-api-access-4qtfq\") pod \"machine-config-operator-74547568cd-nfj8p\" (UID: \"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.438670 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.446864 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-ncgcs" event={"ID":"1420fe53-4382-42b7-a458-a21faa50b858","Type":"ContainerStarted","Data":"f6181cd84806d84624174fa3358c201e7be3b9dba863b8445102bfdc40e81104"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.447858 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-ncgcs"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.448523 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfshv\" (UniqueName: \"kubernetes.io/projected/4fea816f-6c5d-4cb7-89e9-e40a95994f27-kube-api-access-bfshv\") pod \"dns-operator-744455d44c-dlkmg\" (UID: \"4fea816f-6c5d-4cb7-89e9-e40a95994f27\") " pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.455989 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" event={"ID":"3c12dd59-a489-414b-ae4b-670d3d4d4359","Type":"ContainerStarted","Data":"90e0ddf44956179303172832d62e90c53975db5115366abd41ba8c423410b93b"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.456399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9brdp\" (UniqueName: \"kubernetes.io/projected/bac0b68c-760d-404e-a211-aebd21955996-kube-api-access-9brdp\") pod \"machine-config-controller-84d6567774-pg7jr\" (UID: \"bac0b68c-760d-404e-a211-aebd21955996\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.458382 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body=
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.458431 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ncgcs" podUID="1420fe53-4382-42b7-a458-a21faa50b858" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.458887 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" event={"ID":"9a95c84f-467d-4eaf-ad33-504a4d2661c0","Type":"ContainerStarted","Data":"46b6bafdebaf431c5e8481c2150dd0a3ee9f8be8eb68f95473e7bd2393ad8adb"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.459913 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" event={"ID":"7a481797-5e7f-4ce2-b128-ab8062e625cd","Type":"ContainerStarted","Data":"24523cf49cfbc1b05b718076c6af30bec41dbc289f992975cce795d3c4289d53"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.479346 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.496619 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" event={"ID":"78eaee69-d19e-4eb8-a6f1-317434219c61","Type":"ContainerStarted","Data":"5be2f6b2f557ec4d5e4ffde67bdf95f572aa46be37c625c785142510985d9699"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.499818 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-bound-sa-token\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.502189 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spj74\" (UniqueName: \"kubernetes.io/projected/1ed4ae0c-05b0-4b1f-82f6-876a922ee953-kube-api-access-spj74\") pod \"ingress-operator-5b745b69d9-pww5n\" (UID: \"1ed4ae0c-05b0-4b1f-82f6-876a922ee953\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.502218 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"cf1a77036f31ba46ba71bf65a5a50ba4a99b4e30208a06aa7c460356e6fabdd7"}
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.521309 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0baa4484-91f6-4a8a-ac52-52a5b6e1e194-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-kjfhz\" (UID: \"0baa4484-91f6-4a8a-ac52-52a5b6e1e194\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.540213 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtfcs\" (UniqueName: \"kubernetes.io/projected/1a670660-f7f7-42af-91b1-b39b29b3d182-kube-api-access-vtfcs\") pod \"router-default-5444994796-428d6\" (UID: \"1a670660-f7f7-42af-91b1-b39b29b3d182\") " pod="openshift-ingress/router-default-5444994796-428d6"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.555251 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkglq\" (UniqueName: \"kubernetes.io/projected/04df1ccb-2b71-4fdc-bdc2-db70f206de1c-kube-api-access-hkglq\") pod \"console-operator-58897d9998-hd7sc\" (UID: \"04df1ccb-2b71-4fdc-bdc2-db70f206de1c\") " pod="openshift-console-operator/console-operator-58897d9998-hd7sc"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.594896 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595534 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mfxg\" (UniqueName: \"kubernetes.io/projected/47404af2-dbbc-4d33-8c76-488df7344cc9-kube-api-access-2mfxg\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595615 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4f0e2724-69c8-49a9-90ab-c548320def67-tmpfs\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595665 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bxsf\" (UniqueName: \"kubernetes.io/projected/4f0e2724-69c8-49a9-90ab-c548320def67-kube-api-access-7bxsf\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595687 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595725 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-cert\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595740 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595767 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595786 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rghkx\" (UniqueName: \"kubernetes.io/projected/b96884af-41f9-4ce8-afec-b4d5d9a89572-kube-api-access-rghkx\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595880 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htbqf\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595928 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcf7w\" (UniqueName: \"kubernetes.io/projected/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-kube-api-access-xcf7w\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.595987 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hsx4c\" (UniqueName: \"kubernetes.io/projected/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-kube-api-access-hsx4c\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596027 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/47404af2-dbbc-4d33-8c76-488df7344cc9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596047 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596065 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596089 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-cabundle\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596129 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b855p\" (UniqueName: \"kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"
\"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.596208 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b96884af-41f9-4ce8-afec-b4d5d9a89572-serving-cert\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.600964 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.602102 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-profile-collector-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.602739 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdl2t\" (UniqueName: \"kubernetes.io/projected/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-kube-api-access-xdl2t\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.604295 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.607608 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-srv-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.608265 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-webhook-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.608862 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfmz4\" (UniqueName: \"kubernetes.io/projected/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-kube-api-access-mfmz4\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.608947 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-apiservice-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.609344 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b96884af-41f9-4ce8-afec-b4d5d9a89572-config\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.609477 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.609620 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.609794 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.609961 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-key\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.610064 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: E1203 16:20:51.610653 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.110630998 +0000 UTC m=+149.029967421 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.614724 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.624162 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.627560 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.662824 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.673015 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-64v26"] Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.673338 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.680916 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714259 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714522 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rghkx\" (UniqueName: \"kubernetes.io/projected/b96884af-41f9-4ce8-afec-b4d5d9a89572-kube-api-access-rghkx\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714584 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htbqf\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714657 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/044279d5-3fd0-4d9c-be40-65374222a598-metrics-tls\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714686 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcf7w\" (UniqueName: \"kubernetes.io/projected/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-kube-api-access-xcf7w\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714719 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-registration-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714826 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hsx4c\" (UniqueName: \"kubernetes.io/projected/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-kube-api-access-hsx4c\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714871 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-certs\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714905 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/47404af2-dbbc-4d33-8c76-488df7344cc9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714930 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714956 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.714977 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-cabundle\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715010 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b855p\" (UniqueName: \"kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715059 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b96884af-41f9-4ce8-afec-b4d5d9a89572-serving-cert\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715082 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715115 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-profile-collector-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715178 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdl2t\" (UniqueName: 
\"kubernetes.io/projected/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-kube-api-access-xdl2t\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715294 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-srv-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715323 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-csi-data-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715347 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-webhook-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715383 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jxb8\" (UniqueName: \"kubernetes.io/projected/044279d5-3fd0-4d9c-be40-65374222a598-kube-api-access-8jxb8\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715421 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfmz4\" (UniqueName: \"kubernetes.io/projected/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-kube-api-access-mfmz4\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715443 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-apiservice-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715465 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b96884af-41f9-4ce8-afec-b4d5d9a89572-config\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715499 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" 
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715520 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715620 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhd68\" (UniqueName: \"kubernetes.io/projected/3220a3d3-0321-4863-b645-5b28949d7163-kube-api-access-hhd68\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715660 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-key\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715704 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715751 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvxml\" (UniqueName: \"kubernetes.io/projected/388fc6e0-918e-4984-8ab4-abf492bde36a-kube-api-access-nvxml\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715799 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mfxg\" (UniqueName: \"kubernetes.io/projected/47404af2-dbbc-4d33-8c76-488df7344cc9-kube-api-access-2mfxg\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715821 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-mountpoint-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715854 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-node-bootstrap-token\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715879 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/4f0e2724-69c8-49a9-90ab-c548320def67-tmpfs\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715901 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-socket-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.715964 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044279d5-3fd0-4d9c-be40-65374222a598-config-volume\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716030 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bxsf\" (UniqueName: \"kubernetes.io/projected/4f0e2724-69c8-49a9-90ab-c548320def67-kube-api-access-7bxsf\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716069 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716131 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-cert\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716166 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716211 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-plugins-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.716238 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" Dec 03 
16:20:51 crc kubenswrapper[4768]: E1203 16:20:51.717278 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.217245273 +0000 UTC m=+149.136581706 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.721390 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/4f0e2724-69c8-49a9-90ab-c548320def67-tmpfs\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.724449 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.724891 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.729041 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-cabundle\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.731635 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b96884af-41f9-4ce8-afec-b4d5d9a89572-serving-cert\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.733033 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.739240 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-signing-key\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: 
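Note: the E1203 nestedpendingoperations error above is the thread running through this whole stretch of log. Volume operations resolve the CSI driver by name at operation time, and until the kubevirt.io.hostpath-provisioner plugin registers itself with the kubelet, every TearDown/MountDevice attempt fails fast and is re-queued with a 500ms backoff (the "durationBeforeRetry 500ms" in the message). A minimal, self-contained Go sketch of that lookup-and-retry shape follows; the names are hypothetical and this is not kubelet's actual implementation.

package main

import (
	"fmt"
	"sync"
	"time"
)

// driverRegistry stands in for the kubelet-side list of registered CSI drivers.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]struct{}
}

func (r *driverRegistry) lookup(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if _, ok := r.drivers[name]; !ok {
		// Mirrors the error text seen in the log above.
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

func (r *driverRegistry) register(name string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = struct{}{}
}

func main() {
	reg := &driverRegistry{drivers: map[string]struct{}{}}
	// Simulate late registration, as when the hostpath plugin pod itself is
	// still being started (csi-hostpathplugin-ngrx8 in this log).
	go func() {
		time.Sleep(1200 * time.Millisecond)
		reg.register("kubevirt.io.hostpath-provisioner")
	}()
	const backoff = 500 * time.Millisecond // matches durationBeforeRetry in the log
	for {
		if err := reg.lookup("kubevirt.io.hostpath-provisioner"); err != nil {
			fmt.Println("MountDevice failed:", err, "- retrying in", backoff)
			time.Sleep(backoff)
			continue
		}
		fmt.Println("driver registered; mount can proceed")
		return
	}
}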
\"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.741065 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b96884af-41f9-4ce8-afec-b4d5d9a89572-config\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.742201 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.742258 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-cert\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.742670 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.743141 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/47404af2-dbbc-4d33-8c76-488df7344cc9-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.742778 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.747276 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.753280 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-apiservice-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.754674 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.759936 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-srv-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.760308 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4f0e2724-69c8-49a9-90ab-c548320def67-webhook-cert\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.772770 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.772999 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-profile-collector-cert\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.776215 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rghkx\" (UniqueName: \"kubernetes.io/projected/b96884af-41f9-4ce8-afec-b4d5d9a89572-kube-api-access-rghkx\") pod \"service-ca-operator-777779d784-pv9d8\" (UID: \"b96884af-41f9-4ce8-afec-b4d5d9a89572\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.809076 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htbqf\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.818084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcf7w\" 
(UniqueName: \"kubernetes.io/projected/25001558-bf0f-4b1c-9db4-b9c8ae6612b3-kube-api-access-xcf7w\") pod \"ingress-canary-p9rk5\" (UID: \"25001558-bf0f-4b1c-9db4-b9c8ae6612b3\") " pod="openshift-ingress-canary/ingress-canary-p9rk5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.820824 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jxb8\" (UniqueName: \"kubernetes.io/projected/044279d5-3fd0-4d9c-be40-65374222a598-kube-api-access-8jxb8\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.820894 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.820926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhd68\" (UniqueName: \"kubernetes.io/projected/3220a3d3-0321-4863-b645-5b28949d7163-kube-api-access-hhd68\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.820954 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvxml\" (UniqueName: \"kubernetes.io/projected/388fc6e0-918e-4984-8ab4-abf492bde36a-kube-api-access-nvxml\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.820989 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-mountpoint-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821012 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-node-bootstrap-token\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821037 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-socket-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821060 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044279d5-3fd0-4d9c-be40-65374222a598-config-volume\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821115 4768 
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821115 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-plugins-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821145 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/044279d5-3fd0-4d9c-be40-65374222a598-metrics-tls\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821174 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-registration-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821212 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-certs\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821268 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-csi-data-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.821443 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-csi-data-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: E1203 16:20:51.821929 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.321913544 +0000 UTC m=+149.241249967 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.822347 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-socket-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.822588 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-mountpoint-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.823278 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/044279d5-3fd0-4d9c-be40-65374222a598-config-volume\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.823341 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-plugins-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.823480 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mfxg\" (UniqueName: \"kubernetes.io/projected/47404af2-dbbc-4d33-8c76-488df7344cc9-kube-api-access-2mfxg\") pod \"package-server-manager-789f6589d5-f6csd\" (UID: \"47404af2-dbbc-4d33-8c76-488df7344cc9\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.824710 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/3220a3d3-0321-4863-b645-5b28949d7163-registration-dir\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.825648 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-node-bootstrap-token\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.828417 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/388fc6e0-918e-4984-8ab4-abf492bde36a-certs\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.838747 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hsx4c\" (UniqueName: \"kubernetes.io/projected/380e8e18-523c-4f4a-87d7-c2b94cd3ccdc-kube-api-access-hsx4c\") pod \"catalog-operator-68c6474976-p7lxc\" (UID: \"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.853508 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/044279d5-3fd0-4d9c-be40-65374222a598-metrics-tls\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.865156 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-tfmcd"]
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.867042 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdl2t\" (UniqueName: \"kubernetes.io/projected/b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9-kube-api-access-xdl2t\") pod \"service-ca-9c57cc56f-q8rgn\" (UID: \"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9\") " pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.888905 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bxsf\" (UniqueName: \"kubernetes.io/projected/4f0e2724-69c8-49a9-90ab-c548320def67-kube-api-access-7bxsf\") pod \"packageserver-d55dfcdfc-8j2nk\" (UID: \"4f0e2724-69c8-49a9-90ab-c548320def67\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.896188 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfmz4\" (UniqueName: \"kubernetes.io/projected/eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925-kube-api-access-mfmz4\") pod \"control-plane-machine-set-operator-78cbb6b69f-sctcd\" (UID: \"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.921212 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.922252 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:51 crc kubenswrapper[4768]: E1203 16:20:51.922614 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.422570602 +0000 UTC m=+149.341907015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.935934 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.939733 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b855p\" (UniqueName: \"kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p\") pod \"collect-profiles-29412975-rb4qq\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.943644 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"
Dec 03 16:20:51 crc kubenswrapper[4768]: W1203 16:20:51.964925 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9e45680_e1b1_4cf4_8720_ebdff8a8ef55.slice/crio-8cafb7ee7747d32599a1864d60bf343fadfed6fef4bfb11745b3a08427c5e11b WatchSource:0}: Error finding container 8cafb7ee7747d32599a1864d60bf343fadfed6fef4bfb11745b3a08427c5e11b: Status 404 returned error can't find the container with id 8cafb7ee7747d32599a1864d60bf343fadfed6fef4bfb11745b3a08427c5e11b
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.982282 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jxb8\" (UniqueName: \"kubernetes.io/projected/044279d5-3fd0-4d9c-be40-65374222a598-kube-api-access-8jxb8\") pod \"dns-default-v24rm\" (UID: \"044279d5-3fd0-4d9c-be40-65374222a598\") " pod="openshift-dns/dns-default-v24rm"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.990028 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"
Dec 03 16:20:51 crc kubenswrapper[4768]: I1203 16:20:51.994845 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhd68\" (UniqueName: \"kubernetes.io/projected/3220a3d3-0321-4863-b645-5b28949d7163-kube-api-access-hhd68\") pod \"csi-hostpathplugin-ngrx8\" (UID: \"3220a3d3-0321-4863-b645-5b28949d7163\") " pod="hostpath-provisioner/csi-hostpathplugin-ngrx8"
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.002062 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"
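Note: "No sandbox for pod can be found. Need to start a new one" means the kubelet asked the CRI runtime (CRI-O on this node) for the pod's sandbox, the infrastructure "pause" container that owns the pod's network and IPC namespaces, and found none, so pod startup begins by creating one before any app containers run. A rough sketch of that decision, using assumed stand-in types rather than the real CRI gRPC interfaces:

package main

import "fmt"

// fakeRuntime stands in for the CRI runtime; the real kubelet queries
// sandbox status over the CRI gRPC API.
type fakeRuntime struct{ sandboxes map[string]bool }

func (r *fakeRuntime) hasSandbox(podUID string) bool { return r.sandboxes[podUID] }

func (r *fakeRuntime) runPodSandbox(podUID string) {
	// The sandbox holds the pod's shared namespaces; app containers are
	// created inside it afterwards.
	r.sandboxes[podUID] = true
	fmt.Printf("created sandbox for pod %s\n", podUID)
}

func syncPod(rt *fakeRuntime, podUID string) {
	if !rt.hasSandbox(podUID) {
		fmt.Println("No sandbox for pod can be found. Need to start a new one")
		rt.runPodSandbox(podUID)
	}
	// ... image pulls and app-container creation would follow here ...
}

func main() {
	rt := &fakeRuntime{sandboxes: map[string]bool{}}
	syncPod(rt, "25001558-bf0f-4b1c-9db4-b9c8ae6612b3") // e.g. ingress-canary-p9rk5's UID
}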
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.023275 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.023628 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.523615702 +0000 UTC m=+149.442952125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.027832 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.034452 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvxml\" (UniqueName: \"kubernetes.io/projected/388fc6e0-918e-4984-8ab4-abf492bde36a-kube-api-access-nvxml\") pod \"machine-config-server-57fjg\" (UID: \"388fc6e0-918e-4984-8ab4-abf492bde36a\") " pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.034734 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.043583 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.053892 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-p9rk5" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.058618 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.058801 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-57fjg" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.063303 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.071999 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.077770 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29lwk"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.084291 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.124166 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.124647 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.624624242 +0000 UTC m=+149.543960665 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.225288 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.226494 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.72648212 +0000 UTC m=+149.645818543 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.326118 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.326426 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.826409793 +0000 UTC m=+149.745746206 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.328402 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.331055 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-kwbcn"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.337113 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.347754 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.347818 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-nqmfz"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.356545 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.369477 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qwp9t"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.369522 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-48hng"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.373885 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.380054 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
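Note: each "SyncLoop UPDATE" source="api" line is the kubelet's main sync loop receiving a pod update from its API-server watch and dispatching it to that pod's worker; the burst here reflects many operator pods being updated at once during cluster startup. A stripped-down sketch of that event-loop shape, illustrative only and not kubelet's real types:

package main

import "fmt"

// podUpdate carries one batch of changed pods from a config source.
type podUpdate struct {
	source string
	pods   []string
}

// syncLoop drains the update channel, logging each event before a real
// kubelet would hand each pod to its per-pod worker goroutine.
func syncLoop(updates <-chan podUpdate) {
	for u := range updates {
		fmt.Printf("SyncLoop UPDATE source=%q pods=%v\n", u.source, u.pods)
	}
}

func main() {
	ch := make(chan podUpdate, 2)
	ch <- podUpdate{source: "api", pods: []string{"openshift-dns/dns-default-v24rm"}}
	ch <- podUpdate{source: "api", pods: []string{"openshift-marketplace/marketplace-operator-79b997595-9zxmd"}}
	close(ch)
	syncLoop(ch)
}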
pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz"] Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.426719 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.427046 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:52.9270343 +0000 UTC m=+149.846370723 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.507497 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" event={"ID":"9a95c84f-467d-4eaf-ad33-504a4d2661c0","Type":"ContainerStarted","Data":"7ce75b3604a8e4df3fe891ed7795e374417a25be8741a4f1058ca87c02dbee70"} Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.509386 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-428d6" event={"ID":"1a670660-f7f7-42af-91b1-b39b29b3d182","Type":"ContainerStarted","Data":"380aea6f93b96bb42e35fc3a68c33c17ebd449003681c33e52ee02245d80c4b8"} Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.510834 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" event={"ID":"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55","Type":"ContainerStarted","Data":"8cafb7ee7747d32599a1864d60bf343fadfed6fef4bfb11745b3a08427c5e11b"} Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.512645 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" event={"ID":"7a481797-5e7f-4ce2-b128-ab8062e625cd","Type":"ContainerStarted","Data":"027a6396ba13c80c8deadd71610f55a1fa0a709eca3b06751ccaa29e8223d308"} Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.513540 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.515341 4768 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-dngkv container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.515381 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerName="controller-manager" probeResult="failure" output="Get 
\"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.516574 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-64v26" event={"ID":"57775487-facb-4956-a875-a740a48628c1","Type":"ContainerStarted","Data":"8ffa292b27c90cb1df25907b8642bda3cc66e8d6bdbebdc80dd62f4975f85b21"} Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.517078 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.517097 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ncgcs" podUID="1420fe53-4382-42b7-a458-a21faa50b858" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.527502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.527661 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.027639526 +0000 UTC m=+149.946975949 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.527708 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.528018 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.028006218 +0000 UTC m=+149.947342641 (durationBeforeRetry 500ms). 
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.527502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.527661 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.027639526 +0000 UTC m=+149.946975949 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.527708 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.528018 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.028006218 +0000 UTC m=+149.947342641 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.538971 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.540867 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hd7sc"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.542148 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-dlkmg"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.560397 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.571770 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.581586 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.586295 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p"]
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.596453 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod76323600_acfd_400b_b803_b3a05c114209.slice/crio-c7694bba48788a3e6a74236ae87618cc34115ee053e31f7390b51bfda448d307 WatchSource:0}: Error finding container c7694bba48788a3e6a74236ae87618cc34115ee053e31f7390b51bfda448d307: Status 404 returned error can't find the container with id c7694bba48788a3e6a74236ae87618cc34115ee053e31f7390b51bfda448d307
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.596543 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz"]
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.597947 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"]
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.612911 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbac0b68c_760d_404e_a211_aebd21955996.slice/crio-08f5f4ff6d86d35a6e3d5418785f4f3ad9a54d9550736e01c72b77be5e49ef8c WatchSource:0}: Error finding container 08f5f4ff6d86d35a6e3d5418785f4f3ad9a54d9550736e01c72b77be5e49ef8c: Status 404 returned error can't find the container with id 08f5f4ff6d86d35a6e3d5418785f4f3ad9a54d9550736e01c72b77be5e49ef8c
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.614238 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f75f0a9_fec3_41c1_bda0_be2ef7485043.slice/crio-83505cedf70d76610699bac633c069eb0bd6c67b98782446daed7a8eb73c2c9f WatchSource:0}: Error finding container 83505cedf70d76610699bac633c069eb0bd6c67b98782446daed7a8eb73c2c9f: Status 404 returned error can't find the container with id 83505cedf70d76610699bac633c069eb0bd6c67b98782446daed7a8eb73c2c9f
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.614801 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd"]
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.615179 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod355764ad_6eb2_4f34_a2be_3708f8ecf73b.slice/crio-617fc88f1a023f3fb5fca8ae1f8402dda81594d8aa852a1314222b3d3159a682 WatchSource:0}: Error finding container 617fc88f1a023f3fb5fca8ae1f8402dda81594d8aa852a1314222b3d3159a682: Status 404 returned error can't find the container with id 617fc88f1a023f3fb5fca8ae1f8402dda81594d8aa852a1314222b3d3159a682
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.615418 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcadb4efb_b28b_43fc_883f_6cf96d18af72.slice/crio-4529aaa463ca390b99479ffee925c5514e92ea5aaf8507de3cae49720cd3697d WatchSource:0}: Error finding container 4529aaa463ca390b99479ffee925c5514e92ea5aaf8507de3cae49720cd3697d: Status 404 returned error can't find the container with id 4529aaa463ca390b99479ffee925c5514e92ea5aaf8507de3cae49720cd3697d
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.617238 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0d3976c_fbd4_467f_bd38_5f9131f81ea7.slice/crio-d9a64cf7ce4a0dab14600a8a140a2295ab99ed9d2869e0cdb1538256180cc616 WatchSource:0}: Error finding container d9a64cf7ce4a0dab14600a8a140a2295ab99ed9d2869e0cdb1538256180cc616: Status 404 returned error can't find the container with id d9a64cf7ce4a0dab14600a8a140a2295ab99ed9d2869e0cdb1538256180cc616
Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.628344 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.629378 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.129361409 +0000 UTC m=+150.048697832 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.633847 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf296dd0a_7f14_44f1_bd50_368fd1a9f430.slice/crio-9b5d00c498e3cfef2e1259e89bad9d7a15cfb81150c56aac223aafa8e8ff5e96 WatchSource:0}: Error finding container 9b5d00c498e3cfef2e1259e89bad9d7a15cfb81150c56aac223aafa8e8ff5e96: Status 404 returned error can't find the container with id 9b5d00c498e3cfef2e1259e89bad9d7a15cfb81150c56aac223aafa8e8ff5e96
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.634381 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4d07043_2618_451f_94a2_84f34aefb6ce.slice/crio-bbac0a4124f8493cd11c0c48fab70c51672d25ca6433b97574578624da744ae6 WatchSource:0}: Error finding container bbac0a4124f8493cd11c0c48fab70c51672d25ca6433b97574578624da744ae6: Status 404 returned error can't find the container with id bbac0a4124f8493cd11c0c48fab70c51672d25ca6433b97574578624da744ae6
Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.634646 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e2b9b0b_26da_45b0_b434_13aebc027dae.slice/crio-920d2b194f02f3b545f3c5d26f74ee602cdd609a7c08689a229aa672189f6332 WatchSource:0}: Error finding container 920d2b194f02f3b545f3c5d26f74ee602cdd609a7c08689a229aa672189f6332: Status 404 returned error can't find the container with id 920d2b194f02f3b545f3c5d26f74ee602cdd609a7c08689a229aa672189f6332
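Note: the nestedpendingoperations.go:348 lines explain the steady ~500ms cadence of the repeated mount/unmount failures that follow: after each failed attempt the kubelet arms a per-operation deadline and refuses to start another attempt before it ("No retries permitted until ..."), so the loop polls instead of spinning. A tiny gate with the same behavior, hypothetical and simplified:

package main

import (
	"fmt"
	"time"
)

// retryGate remembers, per operation key, the earliest time a new attempt
// may start.
type retryGate struct{ notBefore map[string]time.Time }

// tryStart returns false while the operation is still inside its backoff
// window; otherwise it "runs" the attempt and, on failure, re-arms the gate.
func (g *retryGate) tryStart(op string, backoff time.Duration) bool {
	if t, ok := g.notBefore[op]; ok && time.Now().Before(t) {
		fmt.Printf("Operation for %q failed. No retries permitted until %s (durationBeforeRetry %s)\n",
			op, t.Format(time.RFC3339Nano), backoff)
		return false
	}
	// The attempt would run here; on failure, arm the gate:
	g.notBefore[op] = time.Now().Add(backoff)
	return true
}

func main() {
	g := &retryGate{notBefore: map[string]time.Time{}}
	for i := 0; i < 3; i++ {
		if g.tryStart("MountDevice pvc-657094db", 500*time.Millisecond) {
			fmt.Println("attempt started (and failed; backoff armed)")
		}
		time.Sleep(200 * time.Millisecond) // reconciler ticks faster than the gate
	}
}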
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.829955 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.830038 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.330022483 +0000 UTC m=+150.249358906 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.830104 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.830420 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.330413436 +0000 UTC m=+150.249749859 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.913234 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f0e2724_69c8_49a9_90ab_c548320def67.slice/crio-3205f0674ac63a36239f13d9afcadd6fa211458e7517d6433f10d494c43a52af WatchSource:0}: Error finding container 3205f0674ac63a36239f13d9afcadd6fa211458e7517d6433f10d494c43a52af: Status 404 returned error can't find the container with id 3205f0674ac63a36239f13d9afcadd6fa211458e7517d6433f10d494c43a52af Dec 03 16:20:52 crc kubenswrapper[4768]: W1203 16:20:52.917336 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8443465b_6aa3_4c6c_8c57_035b962770b7.slice/crio-381a986ba20fd79b92f11bf5668e03882203f7d837134521403eb83d91ddbc39 WatchSource:0}: Error finding container 381a986ba20fd79b92f11bf5668e03882203f7d837134521403eb83d91ddbc39: Status 404 returned error can't find the container with id 381a986ba20fd79b92f11bf5668e03882203f7d837134521403eb83d91ddbc39 Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.931835 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.931974 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.431955983 +0000 UTC m=+150.351292406 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:52 crc kubenswrapper[4768]: I1203 16:20:52.932151 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:52 crc kubenswrapper[4768]: E1203 16:20:52.932448 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-03 16:20:53.432434569 +0000 UTC m=+150.351770992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.029936 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-45qqx" podStartSLOduration=127.029914612 podStartE2EDuration="2m7.029914612s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:53.028387561 +0000 UTC m=+149.947723984" watchObservedRunningTime="2025-12-03 16:20:53.029914612 +0000 UTC m=+149.949251035" Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.032955 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.033652 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.533625705 +0000 UTC m=+150.452962148 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.082101 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rvgw8" podStartSLOduration=126.082078721 podStartE2EDuration="2m6.082078721s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:53.08113009 +0000 UTC m=+150.000466523" watchObservedRunningTime="2025-12-03 16:20:53.082078721 +0000 UTC m=+150.001415144" Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.134821 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.135237 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.635217753 +0000 UTC m=+150.554554176 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.228079 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.235430 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.235585 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.735558801 +0000 UTC m=+150.654895224 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.235741 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.236077 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.736063988 +0000 UTC m=+150.655400411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.342579 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.343556 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.843534781 +0000 UTC m=+150.762871204 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.448392 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.448840 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:53.948821083 +0000 UTC m=+150.868157506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.473929 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-ncgcs" podStartSLOduration=127.473905804 podStartE2EDuration="2m7.473905804s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:53.432840533 +0000 UTC m=+150.352176956" watchObservedRunningTime="2025-12-03 16:20:53.473905804 +0000 UTC m=+150.393242227" Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.550070 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.550374 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.05035861 +0000 UTC m=+150.969695033 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.652363 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.652978 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.152961302 +0000 UTC m=+151.072297725 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.679178 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-v24rm"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.679221 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.686900 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" event={"ID":"1ed4ae0c-05b0-4b1f-82f6-876a922ee953","Type":"ContainerStarted","Data":"10df2664d6a92c3a4e8f0442d0dc997fdfa9aae96803532a1d3e22c9691fef25"} Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.704770 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngrx8"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.725328 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" podStartSLOduration=126.725305131 podStartE2EDuration="2m6.725305131s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:53.721256097 +0000 UTC m=+150.640592530" watchObservedRunningTime="2025-12-03 16:20:53.725305131 +0000 UTC m=+150.644641554" Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.761150 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.761437 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.261422379 +0000 UTC m=+151.180758802 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.826646 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.834391 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-q8rgn"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.862159 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.864088 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.864471 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.364458695 +0000 UTC m=+151.283795118 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.868698 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-p9rk5"] Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.899465 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" event={"ID":"8443465b-6aa3-4c6c-8c57-035b962770b7","Type":"ContainerStarted","Data":"381a986ba20fd79b92f11bf5668e03882203f7d837134521403eb83d91ddbc39"} Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.911039 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" event={"ID":"f4901966-da0a-4bdf-ad9a-126056e3cbbf","Type":"ContainerStarted","Data":"9cf9adda332bc172bf2f09f8c2ed3ff3ce0a166f65687e99ddcda72877ce9ed4"} Dec 03 16:20:53 crc kubenswrapper[4768]: I1203 16:20:53.969126 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:53 crc kubenswrapper[4768]: E1203 16:20:53.969633 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.469610192 +0000 UTC m=+151.388946615 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.058828 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" event={"ID":"47404af2-dbbc-4d33-8c76-488df7344cc9","Type":"ContainerStarted","Data":"3057dc08cf083dc1da5fd17dfad94ae7a31f2a31418d15dc94ccf8a29896d81f"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.072872 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.073176 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.573164676 +0000 UTC m=+151.492501099 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.083476 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" event={"ID":"355764ad-6eb2-4f34-a2be-3708f8ecf73b","Type":"ContainerStarted","Data":"617fc88f1a023f3fb5fca8ae1f8402dda81594d8aa852a1314222b3d3159a682"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.086828 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:20:54 crc kubenswrapper[4768]: W1203 16:20:54.094755 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3220a3d3_0321_4863_b645_5b28949d7163.slice/crio-5d0d7650e416070835da942a6426b611ff1bb5b8c93829cc45958abb5ed4cf3c WatchSource:0}: Error finding container 5d0d7650e416070835da942a6426b611ff1bb5b8c93829cc45958abb5ed4cf3c: Status 404 returned error can't find the container with id 5d0d7650e416070835da942a6426b611ff1bb5b8c93829cc45958abb5ed4cf3c Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.098912 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" event={"ID":"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b","Type":"ContainerStarted","Data":"ca083f0cecc02f10ca7b85a7b6bb299ca84a4947067726112a741b66146a5d7e"} Dec 03 16:20:54 crc 
kubenswrapper[4768]: I1203 16:20:54.104458 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" event={"ID":"cadb4efb-b28b-43fc-883f-6cf96d18af72","Type":"ContainerStarted","Data":"4529aaa463ca390b99479ffee925c5514e92ea5aaf8507de3cae49720cd3697d"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.111749 4768 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-mkjqb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" start-of-body= Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.111937 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.32:8443/healthz\": dial tcp 10.217.0.32:8443: connect: connection refused" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.127040 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" event={"ID":"8e2b9b0b-26da-45b0-b434-13aebc027dae","Type":"ContainerStarted","Data":"920d2b194f02f3b545f3c5d26f74ee602cdd609a7c08689a229aa672189f6332"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.129352 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" event={"ID":"3c0620ad-c62a-47b1-9044-ed61241e4a39","Type":"ContainerStarted","Data":"63377e6b46fb5e04ba98c704a17e4e780a3b3af7697e37f41692d145234366c2"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.179326 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" event={"ID":"6f75f0a9-fec3-41c1-bda0-be2ef7485043","Type":"ContainerStarted","Data":"83505cedf70d76610699bac633c069eb0bd6c67b98782446daed7a8eb73c2c9f"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.180220 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.181851 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.681825559 +0000 UTC m=+151.601162012 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.189322 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" event={"ID":"a4d07043-2618-451f-94a2-84f34aefb6ce","Type":"ContainerStarted","Data":"bbac0a4124f8493cd11c0c48fab70c51672d25ca6433b97574578624da744ae6"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.198381 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" event={"ID":"79abfb36-1dff-45e9-a2a7-f463284deffb","Type":"ContainerStarted","Data":"04249b3e8e9567d7c8a4f4e577c86ea814eb3e7603bf6270d0e7f1f58443ccc7"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.237048 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" event={"ID":"2b325ed1-652c-4b16-9f58-04cc416148fd","Type":"ContainerStarted","Data":"a858a5ea35b18e1e83fbafc21a3186c346bd28d816ac0552e3e5a66a7350dd84"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.247005 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" event={"ID":"04df1ccb-2b71-4fdc-bdc2-db70f206de1c","Type":"ContainerStarted","Data":"744ae48db927a7fbc85f1e97876ba9dfb5eca5f6eede1a9ded4b947e0a4ab73a"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.247569 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.249401 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" event={"ID":"462653a7-03ca-4a9d-aeb7-476c0a97a5e1","Type":"ContainerStarted","Data":"d947b1834c890054f3c2ce7593f802a0e508478c74f09970c674a3f1da833e2d"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.251404 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" podStartSLOduration=127.251382886 podStartE2EDuration="2m7.251382886s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.251023284 +0000 UTC m=+151.170359707" watchObservedRunningTime="2025-12-03 16:20:54.251382886 +0000 UTC m=+151.170719309" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.265273 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" event={"ID":"4f0e2724-69c8-49a9-90ab-c548320def67","Type":"ContainerStarted","Data":"3205f0674ac63a36239f13d9afcadd6fa211458e7517d6433f10d494c43a52af"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.267412 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" 
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.281565 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" event={"ID":"f296dd0a-7f14-44f1-bd50-368fd1a9f430","Type":"ContainerStarted","Data":"9b5d00c498e3cfef2e1259e89bad9d7a15cfb81150c56aac223aafa8e8ff5e96"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.282376 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.282838 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.283918 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.783904654 +0000 UTC m=+151.703241077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.284553 4768 patch_prober.go:28] interesting pod/console-operator-58897d9998-hd7sc container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/readyz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.284626 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" podUID="04df1ccb-2b71-4fdc-bdc2-db70f206de1c" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/readyz\": dial tcp 10.217.0.30:8443: connect: connection refused" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.294734 4768 generic.go:334] "Generic (PLEG): container finished" podID="b9e45680-e1b1-4cf4-8720-ebdff8a8ef55" containerID="6c18b3a2a4c78f255cb8d65a1abf4112739f47653e3892ce328b0020f30bdee8" exitCode=0 Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.295103 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" event={"ID":"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55","Type":"ContainerDied","Data":"6c18b3a2a4c78f255cb8d65a1abf4112739f47653e3892ce328b0020f30bdee8"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.306876 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-428d6" event={"ID":"1a670660-f7f7-42af-91b1-b39b29b3d182","Type":"ContainerStarted","Data":"0942102bcb98b3ac55dce52ee45e702b8320f5a468dac263ff9c6dc38e13c03d"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.311384 4768 patch_prober.go:28] interesting 
pod/marketplace-operator-79b997595-9zxmd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body= Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.311618 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.311813 4768 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-8j2nk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.17:5443/healthz\": dial tcp 10.217.0.17:5443: connect: connection refused" start-of-body= Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.311926 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" podUID="4f0e2724-69c8-49a9-90ab-c548320def67" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.17:5443/healthz\": dial tcp 10.217.0.17:5443: connect: connection refused" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.313371 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-57fjg" event={"ID":"388fc6e0-918e-4984-8ab4-abf492bde36a","Type":"ContainerStarted","Data":"8a0797d96382124a3cd4a9916ed36a4cae37c288d47c8c5143705d2f43f588a9"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.324361 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" podStartSLOduration=127.324340365 podStartE2EDuration="2m7.324340365s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.323038042 +0000 UTC m=+151.242374465" watchObservedRunningTime="2025-12-03 16:20:54.324340365 +0000 UTC m=+151.243676788" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.362651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-64v26" event={"ID":"57775487-facb-4956-a875-a740a48628c1","Type":"ContainerStarted","Data":"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.366933 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" event={"ID":"4fea816f-6c5d-4cb7-89e9-e40a95994f27","Type":"ContainerStarted","Data":"988a967f2c930db6fc46a87628a67ee919efc44d1e4058aad59ef071734d64ff"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.375857 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" event={"ID":"76323600-acfd-400b-b803-b3a05c114209","Type":"ContainerStarted","Data":"c7694bba48788a3e6a74236ae87618cc34115ee053e31f7390b51bfda448d307"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.384125 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.395042 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.895013509 +0000 UTC m=+151.814349932 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.429123 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-428d6" podStartSLOduration=127.429103039 podStartE2EDuration="2m7.429103039s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.425495589 +0000 UTC m=+151.344832002" watchObservedRunningTime="2025-12-03 16:20:54.429103039 +0000 UTC m=+151.348439462" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.430446 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" podStartSLOduration=128.430438473 podStartE2EDuration="2m8.430438473s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.376380321 +0000 UTC m=+151.295716744" watchObservedRunningTime="2025-12-03 16:20:54.430438473 +0000 UTC m=+151.349774896" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.441559 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" event={"ID":"bac0b68c-760d-404e-a211-aebd21955996","Type":"ContainerStarted","Data":"08f5f4ff6d86d35a6e3d5418785f4f3ad9a54d9550736e01c72b77be5e49ef8c"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.454446 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" event={"ID":"0baa4484-91f6-4a8a-ac52-52a5b6e1e194","Type":"ContainerStarted","Data":"31af44b5aa8c6a14c79f118c7b94f0000dc87108a1516a66a70bc3a973c27582"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.477563 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-57fjg" podStartSLOduration=6.477541135 podStartE2EDuration="6.477541135s" podCreationTimestamp="2025-12-03 16:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.455166223 +0000 UTC m=+151.374502646" watchObservedRunningTime="2025-12-03 16:20:54.477541135 +0000 UTC m=+151.396877558" Dec 03 16:20:54 crc 
kubenswrapper[4768]: I1203 16:20:54.485529 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.489615 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:54.989577394 +0000 UTC m=+151.908913817 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.492330 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" event={"ID":"e0d3976c-fbd4-467f-bd38-5f9131f81ea7","Type":"ContainerStarted","Data":"d9a64cf7ce4a0dab14600a8a140a2295ab99ed9d2869e0cdb1538256180cc616"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.492546 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.521817 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" podStartSLOduration=127.521801063 podStartE2EDuration="2m7.521801063s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.521051878 +0000 UTC m=+151.440388291" watchObservedRunningTime="2025-12-03 16:20:54.521801063 +0000 UTC m=+151.441137496" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.522648 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" event={"ID":"f8be940a-94e8-4660-90d1-810511203a96","Type":"ContainerStarted","Data":"74c50830babeaeb6de53551f180b86aafd686b4b33eaf369b4bfb037b1a07be7"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.526065 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.565880 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" event={"ID":"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc","Type":"ContainerStarted","Data":"d50126869e0537a19ad5fc41c0bac375f7de620225ca5d8c4d70ed13064877a1"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.591208 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.591813 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.091798114 +0000 UTC m=+152.011134537 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.601804 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" event={"ID":"68c07dfb-be0d-459f-bf5c-e15dc8472ae6","Type":"ContainerStarted","Data":"d5906485a57cfa6849ba7c98cdddf906252262877658413da0c04aed0e0fb64f"} Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.605309 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.605375 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ncgcs" podUID="1420fe53-4382-42b7-a458-a21faa50b858" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.605795 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.617782 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.640636 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" podStartSLOduration=128.640614833 podStartE2EDuration="2m8.640614833s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.58776131 +0000 UTC m=+151.507097733" watchObservedRunningTime="2025-12-03 16:20:54.640614833 +0000 UTC m=+151.559951256" Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.653378 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:20:54 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:20:54 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:20:54 crc 
kubenswrapper[4768]: healthz check failed
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.653431 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.680570 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" podStartSLOduration=127.680542437 podStartE2EDuration="2m7.680542437s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.641222773 +0000 UTC m=+151.560559196" watchObservedRunningTime="2025-12-03 16:20:54.680542437 +0000 UTC m=+151.599878860"
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.696678 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.697002 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.196989252 +0000 UTC m=+152.116325675 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.758158 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-64v26" podStartSLOduration=128.75814074 podStartE2EDuration="2m8.75814074s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.723214242 +0000 UTC m=+151.642550665" watchObservedRunningTime="2025-12-03 16:20:54.75814074 +0000 UTC m=+151.677477163"
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.798954 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.799125 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.299094448 +0000 UTC m=+152.218430871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.800675 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.800998 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.300984791 +0000 UTC m=+152.220321214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:54 crc kubenswrapper[4768]: I1203 16:20:54.902752 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:54 crc kubenswrapper[4768]: E1203 16:20:54.903067 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.403051635 +0000 UTC m=+152.322388058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.009326 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.010011 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.50999302 +0000 UTC m=+152.429329443 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.112641 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.113015 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.612998346 +0000 UTC m=+152.532334769 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.213761 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.214066 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.714056007 +0000 UTC m=+152.633392430 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.315225 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.315433 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.815398848 +0000 UTC m=+152.734735271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.315850 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.316161 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.816148613 +0000 UTC m=+152.735485036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.416754 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.417010 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.916983096 +0000 UTC m=+152.836319519 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.431168 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.431540 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:55.931526778 +0000 UTC m=+152.850863201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.533091 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.533412 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.033383566 +0000 UTC m=+152.952719989 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.533959 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.534292 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.034277956 +0000 UTC m=+152.953614379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.632895 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:20:55 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld
Dec 03 16:20:55 crc kubenswrapper[4768]: [+]process-running ok
Dec 03 16:20:55 crc kubenswrapper[4768]: healthz check failed
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.632966 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.634294 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hd7sc" event={"ID":"04df1ccb-2b71-4fdc-bdc2-db70f206de1c","Type":"ContainerStarted","Data":"e41d79e545e54a540c7e3c4f27b1e5de86c76a7f7d884bfd1dee5570d0588411"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.635886 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.636214 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.136197256 +0000 UTC m=+153.055533679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.640112 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-57fjg" event={"ID":"388fc6e0-918e-4984-8ab4-abf492bde36a","Type":"ContainerStarted","Data":"d340817add314f482c13a4d5e2c96dfc40810046f12cffff23d277d4f4b4cfc6"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.645430 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" event={"ID":"2b325ed1-652c-4b16-9f58-04cc416148fd","Type":"ContainerStarted","Data":"0d39dafb7ca9f3046fdff61c85b570b25f69fa4750e91d6763d15e56352111e2"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.646452 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.660888 4768 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-29lwk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.27:6443/healthz\": dial tcp 10.217.0.27:6443: connect: connection refused" start-of-body=
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.660931 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.27:6443/healthz\": dial tcp 10.217.0.27:6443: connect: connection refused"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.677365 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" event={"ID":"47404af2-dbbc-4d33-8c76-488df7344cc9","Type":"ContainerStarted","Data":"b5fef1c14826a1c9319fd2277491ed781ff47a8ba98bd4d00088177b8d400bc4"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.688988 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v24rm" event={"ID":"044279d5-3fd0-4d9c-be40-65374222a598","Type":"ContainerStarted","Data":"c5edb2056b15570fcb7b88057f46320d2ff6a6aa7f239f07f1b8747991f87388"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.689046 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v24rm" event={"ID":"044279d5-3fd0-4d9c-be40-65374222a598","Type":"ContainerStarted","Data":"323274fe5d194c3b632acaa2d056b96b59b562ca0e4d54f765fbbe34c3d634e8"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.723866 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" event={"ID":"1ed4ae0c-05b0-4b1f-82f6-876a922ee953","Type":"ContainerStarted","Data":"898de90ad4cb0e25f75dd6864861a565d1d3145ee0dcb1d7f0cdd37dc7aa1a3d"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.726543 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" podStartSLOduration=128.726532781 podStartE2EDuration="2m8.726532781s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:54.759573687 +0000 UTC m=+151.678910110" watchObservedRunningTime="2025-12-03 16:20:55.726532781 +0000 UTC m=+152.645869204"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.739841 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.742779 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.242760499 +0000 UTC m=+153.162096922 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.746904 4768 generic.go:334] "Generic (PLEG): container finished" podID="3c0620ad-c62a-47b1-9044-ed61241e4a39" containerID="5cb5ea1897292f4c8c80073903e6813f3b3963e8d4ef1e33379d912ad65ca056" exitCode=0
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.746988 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" event={"ID":"3c0620ad-c62a-47b1-9044-ed61241e4a39","Type":"ContainerStarted","Data":"e3c7bd016bec93f74054bd765dd584267cca9193390dc2cd3e7977c907869dfe"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.747019 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" event={"ID":"3c0620ad-c62a-47b1-9044-ed61241e4a39","Type":"ContainerDied","Data":"5cb5ea1897292f4c8c80073903e6813f3b3963e8d4ef1e33379d912ad65ca056"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.747761 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.775935 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" event={"ID":"79abfb36-1dff-45e9-a2a7-f463284deffb","Type":"ContainerStarted","Data":"ed70c76b36466449d9b7efb9e84bd55efda0beff7287382c87efd03806208d6c"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.782184 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" event={"ID":"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9","Type":"ContainerStarted","Data":"40a060e5a4a3cb416f7e5e982976a1598ca813c6c3f717ab5dbb1967c50fce34"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.793791 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" podStartSLOduration=129.793771721 podStartE2EDuration="2m9.793771721s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:55.728188186 +0000 UTC m=+152.647524599" watchObservedRunningTime="2025-12-03 16:20:55.793771721 +0000 UTC m=+152.713108144"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.795071 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" podStartSLOduration=129.795065954 podStartE2EDuration="2m9.795065954s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:55.78590585 +0000 UTC m=+152.705242273" watchObservedRunningTime="2025-12-03 16:20:55.795065954 +0000 UTC m=+152.714402377"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.819842 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9wkh5" event={"ID":"462653a7-03ca-4a9d-aeb7-476c0a97a5e1","Type":"ContainerStarted","Data":"af9094cea7ed81e5e9fbbf7cf06d87475a595eca301d939dd51c031f3d3f408f"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.843886 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.845025 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.34500916 +0000 UTC m=+153.264345583 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.849478 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" event={"ID":"bac0b68c-760d-404e-a211-aebd21955996","Type":"ContainerStarted","Data":"0e117611e5723ea2375d64a1d53016e4a8663b29374dac0d0460b601f05daca2"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.879899 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" event={"ID":"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b","Type":"ContainerStarted","Data":"b8889524f953afcbc7eea7c6c4eea38cdcd487f2f440508b6c2616b134c6bcb6"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.879954 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" event={"ID":"3f8bfc8b-163e-4915-b9af-9f4d55e5b16b","Type":"ContainerStarted","Data":"ffbb6d27151536e5ef13c747ec522c95187058389d21814feabd1383d0781cbc"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.903955 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" event={"ID":"f4901966-da0a-4bdf-ad9a-126056e3cbbf","Type":"ContainerStarted","Data":"6f7f0b3c644565694e770fb94a2b91d8b51ba1c163a9def72985f377ec5902c4"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.933927 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" event={"ID":"a4d07043-2618-451f-94a2-84f34aefb6ce","Type":"ContainerStarted","Data":"dc436282185a9bf417a769266f2178350c285d578498fc7e833505fa3a4acb3b"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.935478 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xf7dt" podStartSLOduration=128.935468679 podStartE2EDuration="2m8.935468679s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:55.858170266 +0000 UTC m=+152.777506689" watchObservedRunningTime="2025-12-03 16:20:55.935468679 +0000 UTC m=+152.854805102"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.946313 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:55 crc kubenswrapper[4768]: E1203 16:20:55.947551 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.44753863 +0000 UTC m=+153.366875053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.962947 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" event={"ID":"0baa4484-91f6-4a8a-ac52-52a5b6e1e194","Type":"ContainerStarted","Data":"408bbf57dbc86b9b4655694e0f83bbf80d5a1143cc824e13346dea70790a1379"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.971204 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-hd7sc"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.980255 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mmfqj" podStartSLOduration=128.980226814 podStartE2EDuration="2m8.980226814s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:55.979051055 +0000 UTC m=+152.898387478" watchObservedRunningTime="2025-12-03 16:20:55.980226814 +0000 UTC m=+152.899563237"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.980390 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nfj8p" podStartSLOduration=128.980384219 podStartE2EDuration="2m8.980384219s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:55.938124857 +0000 UTC m=+152.857461280" watchObservedRunningTime="2025-12-03 16:20:55.980384219 +0000 UTC m=+152.899720672"
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.994853 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" event={"ID":"ae0eb1d1-505d-4031-885d-652dea1526ea","Type":"ContainerStarted","Data":"194c8e29560223d8a34c284ca2439ab3ca804b9a6ebfb9921fd7f1ad2a619b65"}
Dec 03 16:20:55 crc kubenswrapper[4768]: I1203 16:20:55.994907 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" event={"ID":"ae0eb1d1-505d-4031-885d-652dea1526ea","Type":"ContainerStarted","Data":"91009abb10f15c1e5d48c0923400b335b2536a84f1cde532df693240d3a25e7f"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.008830 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" event={"ID":"76323600-acfd-400b-b803-b3a05c114209","Type":"ContainerStarted","Data":"4306b0d01ce28bb5eee76b80e7d875da38eb232dd0eb21120c9e19b5a43cdbab"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.028930 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" event={"ID":"8443465b-6aa3-4c6c-8c57-035b962770b7","Type":"ContainerStarted","Data":"10067d7c72faa6d6f8ca5c66cabbcb7298621c6d43cc11cd877b995a6f97d029"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.032552 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.032622 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.038896 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk" event={"ID":"4f0e2724-69c8-49a9-90ab-c548320def67","Type":"ContainerStarted","Data":"423c9e085bdac593ad9d580d246f539c8076631255cf03f980ce01f4d2e881f6"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.047163 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.048477 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.548458546 +0000 UTC m=+153.467794969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.048946 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-qwp9t" podStartSLOduration=129.048928182 podStartE2EDuration="2m9.048928182s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.023139827 +0000 UTC m=+152.942476250" watchObservedRunningTime="2025-12-03 16:20:56.048928182 +0000 UTC m=+152.968264605"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.050967 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-kjfhz" podStartSLOduration=129.050960339 podStartE2EDuration="2m9.050960339s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.047834516 +0000 UTC m=+152.967170939" watchObservedRunningTime="2025-12-03 16:20:56.050960339 +0000 UTC m=+152.970296762"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.059417 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" event={"ID":"380e8e18-523c-4f4a-87d7-c2b94cd3ccdc","Type":"ContainerStarted","Data":"541c1eb0fc1a5001eda3868bedf4b16abdc8b7b88cc08be0f9b17b63954c75d1"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.060490 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.068547 4768 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-p7lxc container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body=
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.068616 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" podUID="380e8e18-523c-4f4a-87d7-c2b94cd3ccdc" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.070168 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" event={"ID":"b96884af-41f9-4ce8-afec-b4d5d9a89572","Type":"ContainerStarted","Data":"901983c93ac865191af03d8a625bfb21e54a2304b263333b0f963e986dc8ff10"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.085957 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" event={"ID":"cadb4efb-b28b-43fc-883f-6cf96d18af72","Type":"ContainerStarted","Data":"7072ec776b5f04d90ef317903e7a674b24ac6567cc92af8ba39b538a4c755aef"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.113302 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" event={"ID":"6f75f0a9-fec3-41c1-bda0-be2ef7485043","Type":"ContainerStarted","Data":"0d122ad6c51d17fb7534b1a054dd2ea9d31a2ce31b93ae9b5a91f91586069edd"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.124430 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" event={"ID":"3220a3d3-0321-4863-b645-5b28949d7163","Type":"ContainerStarted","Data":"5d0d7650e416070835da942a6426b611ff1bb5b8c93829cc45958abb5ed4cf3c"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.146832 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" podStartSLOduration=130.146812858 podStartE2EDuration="2m10.146812858s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.082203695 +0000 UTC m=+153.001540118" watchObservedRunningTime="2025-12-03 16:20:56.146812858 +0000 UTC m=+153.066149281"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.149416 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.150376 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.650365885 +0000 UTC m=+153.569702308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.191770 4768 generic.go:334] "Generic (PLEG): container finished" podID="68c07dfb-be0d-459f-bf5c-e15dc8472ae6" containerID="948dd329374673cc61844fafd9473781a9988cd143ebe94c5c20dd217905a5a8" exitCode=0
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.192211 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" event={"ID":"68c07dfb-be0d-459f-bf5c-e15dc8472ae6","Type":"ContainerDied","Data":"948dd329374673cc61844fafd9473781a9988cd143ebe94c5c20dd217905a5a8"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.203656 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jgvc5" podStartSLOduration=129.203642412 podStartE2EDuration="2m9.203642412s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.201960936 +0000 UTC m=+153.121297359" watchObservedRunningTime="2025-12-03 16:20:56.203642412 +0000 UTC m=+153.122978835"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.210906 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" event={"ID":"f8be940a-94e8-4660-90d1-810511203a96","Type":"ContainerStarted","Data":"a137877b5a13c5e35f85232b9c77dfb034fdd83a220b14e6affe3d69c82449dc"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.232312 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-vprvd" event={"ID":"8e2b9b0b-26da-45b0-b434-13aebc027dae","Type":"ContainerStarted","Data":"eb783f5392f137913f715561939c19d926efb060abe70f22e74f0b537eadbf44"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.238910 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" event={"ID":"355764ad-6eb2-4f34-a2be-3708f8ecf73b","Type":"ContainerStarted","Data":"aaa3aed05b42a80119e6b0b3881738addcd2ea0d6a72d6269e7f138b2730b60c"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.251187 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.252182 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.752161431 +0000 UTC m=+153.671497844 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.254181 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.263306 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hhzd9" event={"ID":"e0d3976c-fbd4-467f-bd38-5f9131f81ea7","Type":"ContainerStarted","Data":"348a5f0bd4f4849ba8c9d70d6e5077deac98c98371c094028cf4d1eb1a37f4b0"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.296590 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-p9rk5" event={"ID":"25001558-bf0f-4b1c-9db4-b9c8ae6612b3","Type":"ContainerStarted","Data":"4847bde6784f61424a0047222084dae9e6f16c08ffeb9c5a82754db731f42529"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.364432 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.366721 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.866701809 +0000 UTC m=+153.786038232 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.391173 4768 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-9zxmd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused" start-of-body=
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.391225 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.22:8080/healthz\": dial tcp 10.217.0.22:8080: connect: connection refused"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.391506 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" event={"ID":"f296dd0a-7f14-44f1-bd50-368fd1a9f430","Type":"ContainerStarted","Data":"a7270c904125c21a4ac078512b8e8ad09de5b5c23e14202714b619424edb0548"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.397109 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-8j2nk"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.405283 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" event={"ID":"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925","Type":"ContainerStarted","Data":"db386eafb799baa51d015589ddc4126f482ea7329a1b3e92acff7fae50c1aa58"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.405328 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" event={"ID":"eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925","Type":"ContainerStarted","Data":"70b434ef8be75f53bf9bf65a4e691ee37e47f16eb3afb8d4fa64d1b9b2928660"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.425001 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" event={"ID":"4fea816f-6c5d-4cb7-89e9-e40a95994f27","Type":"ContainerStarted","Data":"9f7841b54c495ed60d8ea6ed4e33b96c9612dd0c8105df0e419f1606e5611f85"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.426994 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" podStartSLOduration=129.426982018 podStartE2EDuration="2m9.426982018s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.288486105 +0000 UTC m=+153.207822528" watchObservedRunningTime="2025-12-03 16:20:56.426982018 +0000 UTC m=+153.346318441"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.430654 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" event={"ID":"9a95c84f-467d-4eaf-ad33-504a4d2661c0","Type":"ContainerStarted","Data":"dadbcd90192024dd630da5196e192a51f54c0c5bff39c0ddd429f77543ec3e36"}
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.467193 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.467504 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.967476571 +0000 UTC m=+153.886812994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.467856 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.469473 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:56.969460197 +0000 UTC m=+153.888796620 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.540947 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-sctcd" podStartSLOduration=129.540926536 podStartE2EDuration="2m9.540926536s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.481906779 +0000 UTC m=+153.401243212" watchObservedRunningTime="2025-12-03 16:20:56.540926536 +0000 UTC m=+153.460262959"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.569767 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.572021 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-p9rk5" podStartSLOduration=8.571991387 podStartE2EDuration="8.571991387s" podCreationTimestamp="2025-12-03 16:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.562997948 +0000 UTC m=+153.482334371" watchObservedRunningTime="2025-12-03 16:20:56.571991387 +0000 UTC m=+153.491327810"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.572143 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.072124461 +0000 UTC m=+153.991460884 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.617133 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 03 16:20:56 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld
Dec 03 16:20:56 crc kubenswrapper[4768]: [+]process-running ok
Dec 03 16:20:56 crc kubenswrapper[4768]: healthz check failed
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.617207 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.650140 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" podStartSLOduration=129.650121737 podStartE2EDuration="2m9.650121737s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.617915939 +0000 UTC m=+153.537252362" watchObservedRunningTime="2025-12-03 16:20:56.650121737 +0000 UTC m=+153.569458160"
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.671150 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.671426 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.171414973 +0000 UTC m=+154.090751396 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.773367 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.773808 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.273791798 +0000 UTC m=+154.193128221 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.875503 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.875987 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.375968796 +0000 UTC m=+154.295305219 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:56 crc kubenswrapper[4768]: I1203 16:20:56.976977 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:56 crc kubenswrapper[4768]: E1203 16:20:56.977405 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.47738909 +0000 UTC m=+154.396725513 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.078798 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.079282 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.579261838 +0000 UTC m=+154.498598251 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.180272 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.180382 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.68036405 +0000 UTC m=+154.599700473 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.180679 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5"
Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.181150 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.681130546 +0000 UTC m=+154.600466969 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.281711 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.282020 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.782005391 +0000 UTC m=+154.701341814 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.382632 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.382959 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.882948258 +0000 UTC m=+154.802284681 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.462192 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" event={"ID":"8443465b-6aa3-4c6c-8c57-035b962770b7","Type":"ContainerStarted","Data":"d16955177aa65e358a37d3401be9eb84ba58ade6f0485fd0f5b78173ddc8f3f2"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.471348 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" event={"ID":"f8be940a-94e8-4660-90d1-810511203a96","Type":"ContainerStarted","Data":"94bd487ef94a2ba7bfaeee5ca9b3a3c9243d31a4c94f1723bd25e8c72c86935d"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.475545 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" event={"ID":"cadb4efb-b28b-43fc-883f-6cf96d18af72","Type":"ContainerStarted","Data":"8fd8bd325b65c72e695f58b109dac6c179c194b3621861069262f070271ead6e"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.480631 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" event={"ID":"bac0b68c-760d-404e-a211-aebd21955996","Type":"ContainerStarted","Data":"512dc8e8f99ab5c661753758e01655943e8a17eac3e6b89fd6ea4a010081b2df"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.483049 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.483300 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:57.983285815 +0000 UTC m=+154.902622238 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.490121 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-v24rm" event={"ID":"044279d5-3fd0-4d9c-be40-65374222a598","Type":"ContainerStarted","Data":"44bf1667cd476980f19a55b80f67c960d2906ba190beecab7ecc1db872ae77d1"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.490635 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-v24rm" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.491770 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-lgng9" podStartSLOduration=131.491759526 podStartE2EDuration="2m11.491759526s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:56.689280476 +0000 UTC m=+153.608616899" watchObservedRunningTime="2025-12-03 16:20:57.491759526 +0000 UTC m=+154.411095949" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.496140 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" event={"ID":"b96884af-41f9-4ce8-afec-b4d5d9a89572","Type":"ContainerStarted","Data":"52234c75ba7fea121f394347af25ac42cb0e58ca0c92623ecb582864462df2af"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.501239 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-dlkmg" event={"ID":"4fea816f-6c5d-4cb7-89e9-e40a95994f27","Type":"ContainerStarted","Data":"38d94a10da7e1d7c815185c9c9309ace6a978a9cb78faa443e0e878b458c161a"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.506626 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" event={"ID":"47404af2-dbbc-4d33-8c76-488df7344cc9","Type":"ContainerStarted","Data":"f7ab5ce767b22f3ae0a04c2a775773601ae23ed5a28f95d2865afc06366406c6"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.507088 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.519608 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-nqmfz" podStartSLOduration=130.519572629 podStartE2EDuration="2m10.519572629s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.51598773 +0000 UTC m=+154.435324153" watchObservedRunningTime="2025-12-03 16:20:57.519572629 +0000 UTC m=+154.438909052" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.519981 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-k8zlz" podStartSLOduration=130.519977612 podStartE2EDuration="2m10.519977612s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.494745485 +0000 UTC m=+154.414081908" watchObservedRunningTime="2025-12-03 16:20:57.519977612 +0000 UTC m=+154.439314035" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.520261 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" event={"ID":"1ed4ae0c-05b0-4b1f-82f6-876a922ee953","Type":"ContainerStarted","Data":"8a9ef765821c56e084d64792f90e2447bbc329823f97d25b238e9fd316c58517"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.528169 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-p9rk5" event={"ID":"25001558-bf0f-4b1c-9db4-b9c8ae6612b3","Type":"ContainerStarted","Data":"8161b72dc04181184fcff4bfb78bda6c17fd840c8e62085b6283e45089530d9e"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.554315 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" event={"ID":"68c07dfb-be0d-459f-bf5c-e15dc8472ae6","Type":"ContainerStarted","Data":"cacb287d6052e1fa651e7fcc428f0e66d7527739a5a63952a4f0c58198159886"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.554357 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" event={"ID":"3220a3d3-0321-4863-b645-5b28949d7163","Type":"ContainerStarted","Data":"1cb7b37f20691b03830c9d73fa7d16541a695a099e5d9ccd8282e22c0bba937f"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.554367 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" event={"ID":"3220a3d3-0321-4863-b645-5b28949d7163","Type":"ContainerStarted","Data":"ce5c7b881f6766c3333a064d1473aac52176116de0dd843e74aba5b5dc14b193"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.554377 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" event={"ID":"b9289e0d-d7ba-4803-85f8-4a82e2cfd3d9","Type":"ContainerStarted","Data":"d3960d542bd461cc534f4a4739aac430aa4e391b0ee703ba516700afbe2e0aa3"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.554388 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" event={"ID":"6f75f0a9-fec3-41c1-bda0-be2ef7485043","Type":"ContainerStarted","Data":"24d36f35a07594aac16ce075c8511ddb147976aaf43c7cc50fd844e9f0282b46"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.572462 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" event={"ID":"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55","Type":"ContainerStarted","Data":"6050b7baff101141698eb58dbd7ab2e9d65fe53a4cad526c9bf5a01508a6d254"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.572508 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" event={"ID":"b9e45680-e1b1-4cf4-8720-ebdff8a8ef55","Type":"ContainerStarted","Data":"feffee24e1407b79f2387c0d431e8c4f4a06c1c4cea31087ce51466421abd0cc"} Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.589823 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.602380 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.102362284 +0000 UTC m=+155.021698707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.604390 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.616293 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p7lxc" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.644254 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-v24rm" podStartSLOduration=9.644236692 podStartE2EDuration="9.644236692s" podCreationTimestamp="2025-12-03 16:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.577841551 +0000 UTC m=+154.497177974" watchObservedRunningTime="2025-12-03 16:20:57.644236692 +0000 UTC m=+154.563573115" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.644357 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fzskf" podStartSLOduration=131.644352986 podStartE2EDuration="2m11.644352986s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.644077127 +0000 UTC m=+154.563413550" watchObservedRunningTime="2025-12-03 16:20:57.644352986 +0000 UTC m=+154.563689409" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.650334 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:20:57 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:20:57 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:20:57 crc kubenswrapper[4768]: healthz check failed Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.650436 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 
16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.692672 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.694487 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.194462228 +0000 UTC m=+155.113798651 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.718235 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.757693 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pg7jr" podStartSLOduration=130.757671374 podStartE2EDuration="2m10.757671374s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.707838571 +0000 UTC m=+154.627174984" watchObservedRunningTime="2025-12-03 16:20:57.757671374 +0000 UTC m=+154.677007797" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.759220 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-q8rgn" podStartSLOduration=130.759213765 podStartE2EDuration="2m10.759213765s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.747319411 +0000 UTC m=+154.666655824" watchObservedRunningTime="2025-12-03 16:20:57.759213765 +0000 UTC m=+154.678550178" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.802180 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.802549 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.302537632 +0000 UTC m=+155.221874055 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.826916 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" podStartSLOduration=130.826892749 podStartE2EDuration="2m10.826892749s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.78410262 +0000 UTC m=+154.703439053" watchObservedRunningTime="2025-12-03 16:20:57.826892749 +0000 UTC m=+154.746229172" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.903685 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.903891 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.403861002 +0000 UTC m=+155.323197425 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.904007 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:57 crc kubenswrapper[4768]: E1203 16:20:57.904383 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.404373049 +0000 UTC m=+155.323709662 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.940712 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-pv9d8" podStartSLOduration=130.940684523 podStartE2EDuration="2m10.940684523s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.939894136 +0000 UTC m=+154.859230569" watchObservedRunningTime="2025-12-03 16:20:57.940684523 +0000 UTC m=+154.860020946" Dec 03 16:20:57 crc kubenswrapper[4768]: I1203 16:20:57.941835 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" podStartSLOduration=131.941826811 podStartE2EDuration="2m11.941826811s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:57.914805465 +0000 UTC m=+154.834141898" watchObservedRunningTime="2025-12-03 16:20:57.941826811 +0000 UTC m=+154.861163244" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.005511 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.005723 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.505692438 +0000 UTC m=+155.425028861 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.005868 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.006194 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.506182415 +0000 UTC m=+155.425518838 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.016504 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-pww5n" podStartSLOduration=131.016485456 podStartE2EDuration="2m11.016485456s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:58.013835978 +0000 UTC m=+154.933172401" watchObservedRunningTime="2025-12-03 16:20:58.016485456 +0000 UTC m=+154.935821869" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.068449 4768 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.080749 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" podStartSLOduration=131.080733277 podStartE2EDuration="2m11.080733277s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:58.043642957 +0000 UTC m=+154.962979370" watchObservedRunningTime="2025-12-03 16:20:58.080733277 +0000 UTC m=+155.000069700" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.111081 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.111326 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.61129668 +0000 UTC m=+155.530633103 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.111763 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.112088 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.612075876 +0000 UTC m=+155.531412299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.112536 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-kwbcn" podStartSLOduration=131.112513261 podStartE2EDuration="2m11.112513261s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:58.111386533 +0000 UTC m=+155.030722956" watchObservedRunningTime="2025-12-03 16:20:58.112513261 +0000 UTC m=+155.031849684" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.168671 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.169530 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.173336 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.191962 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.213237 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.213671 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.713651354 +0000 UTC m=+155.632987787 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.316481 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfzqt\" (UniqueName: \"kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.316526 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.316549 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.316577 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.316902 4768 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.816888708 +0000 UTC m=+155.736225131 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.334123 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.334991 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.340572 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.347955 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417449 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417659 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfzqt\" (UniqueName: \"kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417690 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417722 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8qsb\" (UniqueName: \"kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417751 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417768 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.417783 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.417918 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:58.917903247 +0000 UTC m=+155.837239670 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.418667 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.418761 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.439161 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfzqt\" (UniqueName: \"kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt\") pod \"community-operators-dnd7v\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.511026 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.518737 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.518770 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8qsb\" (UniqueName: \"kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.518800 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.518818 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.519224 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.519431 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-03 16:20:59.019412263 +0000 UTC m=+155.938748686 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xmph5" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.519621 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.532417 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.533272 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.544279 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.546295 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8qsb\" (UniqueName: \"kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb\") pod \"certified-operators-nt5hc\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.615401 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:20:58 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:20:58 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:20:58 crc kubenswrapper[4768]: healthz check failed Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.615478 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.621546 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.621847 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlzwj\" (UniqueName: \"kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.621897 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.621960 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: E1203 16:20:58.622089 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-03 16:20:59.122073497 +0000 UTC m=+156.041409920 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.656465 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.691023 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" event={"ID":"3220a3d3-0321-4863-b645-5b28949d7163","Type":"ContainerStarted","Data":"886ddb364168d5ed53c8d3b071d393af535fe0e17319c7e59aa905c67f679adc"} Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.691070 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" event={"ID":"3220a3d3-0321-4863-b645-5b28949d7163","Type":"ContainerStarted","Data":"1162af51a877881c6e4c3b9f492de1c7785b21525202450f7f79b036b5388208"} Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.697579 4768 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-03T16:20:58.06847533Z","Handler":null,"Name":""} Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.709673 4768 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.709713 4768 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.720891 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" podStartSLOduration=10.720874243 podStartE2EDuration="10.720874243s" podCreationTimestamp="2025-12-03 16:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:58.718135942 +0000 UTC m=+155.637472385" watchObservedRunningTime="2025-12-03 16:20:58.720874243 +0000 UTC m=+155.640210656" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.723439 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlzwj\" (UniqueName: \"kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.724176 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.724333 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.724656 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.727650 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.747109 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.759067 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.765423 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-48hng" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.765531 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.772217 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlzwj\" (UniqueName: \"kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj\") pod \"community-operators-ckrtd\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.774661 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.790323 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.790361 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.826542 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.826924 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7nt5\" (UniqueName: \"kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.827039 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.918986 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.930372 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.930434 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7nt5\" (UniqueName: \"kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.930469 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.930912 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.931040 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:58 crc kubenswrapper[4768]: I1203 16:20:58.962390 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7nt5\" (UniqueName: \"kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5\") pod \"certified-operators-xjxcv\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.011109 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.015388 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.068136 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xmph5\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.104344 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.106964 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.133216 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.300140 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.347812 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.472892 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.542665 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.544502 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.609914 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:20:59 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:20:59 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:20:59 crc kubenswrapper[4768]: healthz check failed Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.609967 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.697000 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" event={"ID":"028c92ba-255e-47a9-9404-3ecbcb709029","Type":"ContainerStarted","Data":"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.697058 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" event={"ID":"028c92ba-255e-47a9-9404-3ecbcb709029","Type":"ContainerStarted","Data":"0d2cdbc339b59c184bfddfad1b3f7137c33d814781d672f07810b4dc6350b0b1"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.697135 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.699032 4768 generic.go:334] "Generic 
(PLEG): container finished" podID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerID="c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770" exitCode=0 Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.699098 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerDied","Data":"c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.699128 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerStarted","Data":"3179b86dd2db0ec3e3783331c46d2bf0d6e5db5510f7459fc8ad83a3f0244600"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.701576 4768 generic.go:334] "Generic (PLEG): container finished" podID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerID="f8172614e9d04285defb75f21040784ae76da1053f438c95e8188d2c5ece0e76" exitCode=0 Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.701651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerDied","Data":"f8172614e9d04285defb75f21040784ae76da1053f438c95e8188d2c5ece0e76"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.701673 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerStarted","Data":"0e29314526e8ed160c084bd393cffdf76a295a4057bd55eba443db5f4df366f7"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.703536 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.704354 4768 generic.go:334] "Generic (PLEG): container finished" podID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerID="5e748e33a8e7bed9ee479b1895a711d8c003e9ae0ec55e1e1c0a21c3ef6910b1" exitCode=0 Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.704404 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerDied","Data":"5e748e33a8e7bed9ee479b1895a711d8c003e9ae0ec55e1e1c0a21c3ef6910b1"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.704426 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerStarted","Data":"1ac3fdbb85ce0f8195e828166558f0c7ddc10230847417f50e32a1150468a347"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.710961 4768 generic.go:334] "Generic (PLEG): container finished" podID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerID="f6fc37b13102abc589d535c53e590a8462a2343cbe92ddf4e61ea889013db9c3" exitCode=0 Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.711182 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerDied","Data":"f6fc37b13102abc589d535c53e590a8462a2343cbe92ddf4e61ea889013db9c3"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.711260 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" 
event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerStarted","Data":"698c583040d03b1c7c0f26a5da136e1b3c26f758eba5aa1e4eedb038e9c949e4"} Dec 03 16:20:59 crc kubenswrapper[4768]: I1203 16:20:59.727006 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" podStartSLOduration=132.726979126 podStartE2EDuration="2m12.726979126s" podCreationTimestamp="2025-12-03 16:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:20:59.725865099 +0000 UTC m=+156.645201522" watchObservedRunningTime="2025-12-03 16:20:59.726979126 +0000 UTC m=+156.646315549" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.118813 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.119315 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-ncgcs" podUID="1420fe53-4382-42b7-a458-a21faa50b858" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.119360 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" start-of-body= Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.119439 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-ncgcs" podUID="1420fe53-4382-42b7-a458-a21faa50b858" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.6:8080/\": dial tcp 10.217.0.6:8080: connect: connection refused" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.328328 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.329345 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.330986 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.340755 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.490042 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.490299 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gstdj\" (UniqueName: \"kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.490331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.591254 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gstdj\" (UniqueName: \"kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.591331 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.591390 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.591934 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.591986 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities\") pod \"redhat-marketplace-5xt59\" (UID: 
\"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.608684 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:21:00 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:21:00 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:21:00 crc kubenswrapper[4768]: healthz check failed Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.608756 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.620164 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gstdj\" (UniqueName: \"kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj\") pod \"redhat-marketplace-5xt59\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.695042 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.728646 4768 generic.go:334] "Generic (PLEG): container finished" podID="ae0eb1d1-505d-4031-885d-652dea1526ea" containerID="194c8e29560223d8a34c284ca2439ab3ca804b9a6ebfb9921fd7f1ad2a619b65" exitCode=0 Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.730091 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" event={"ID":"ae0eb1d1-505d-4031-885d-652dea1526ea","Type":"ContainerDied","Data":"194c8e29560223d8a34c284ca2439ab3ca804b9a6ebfb9921fd7f1ad2a619b65"} Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.760947 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.762406 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.768134 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.895878 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.895985 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ghwv\" (UniqueName: \"kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.896030 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.997249 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.997320 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ghwv\" (UniqueName: \"kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.997350 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.997935 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:00 crc kubenswrapper[4768]: I1203 16:21:00.998172 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.016678 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.038796 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ghwv\" (UniqueName: \"kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv\") pod \"redhat-marketplace-tk84c\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:01 crc kubenswrapper[4768]: W1203 16:21:01.053793 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf033c93d_ce31_465a_8466_049ff04809ff.slice/crio-868677f54aef8258632743b390e6b76857f15078a1938a4978b801f82af37696 WatchSource:0}: Error finding container 868677f54aef8258632743b390e6b76857f15078a1938a4978b801f82af37696: Status 404 returned error can't find the container with id 868677f54aef8258632743b390e6b76857f15078a1938a4978b801f82af37696 Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.110682 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.114408 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.114450 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.122866 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.123784 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.127022 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.128466 4768 patch_prober.go:28] interesting pod/console-f9d7485db-64v26 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.128545 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-64v26" podUID="57775487-facb-4956-a875-a740a48628c1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.8:8443/health\": dial tcp 10.217.0.8:8443: connect: connection refused" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.210241 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.210293 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.216619 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.264446 4768 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.265136 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.275161 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.275228 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.278061 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.330716 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.331937 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.334497 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.344581 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.403124 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.403207 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.450496 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.504992 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.505059 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.505091 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rvfq\" (UniqueName: 
\"kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.505137 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.505162 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.505179 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.527570 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.528582 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.538210 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.548432 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.605534 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.606353 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.606413 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rvfq\" (UniqueName: \"kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.606445 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.606850 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.607300 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.610747 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:21:01 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:21:01 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:21:01 crc kubenswrapper[4768]: healthz check failed Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.610801 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.619190 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.632650 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rvfq\" (UniqueName: \"kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq\") pod \"redhat-operators-tpql2\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.650868 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.708760 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgk96\" (UniqueName: \"kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.708821 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.708852 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.736374 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerStarted","Data":"afbfd5b606024f3fbbe57109c8620507c39b089aaa648d8298ca270f3012ff10"} Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.737973 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerStarted","Data":"868677f54aef8258632743b390e6b76857f15078a1938a4978b801f82af37696"} Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.742427 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-tfmcd" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.743264 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-knwwz" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.811176 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgk96\" (UniqueName: \"kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.811237 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.811266 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: 
I1203 16:21:01.812406 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.814822 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.849011 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgk96\" (UniqueName: \"kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96\") pod \"redhat-operators-zx57v\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.867185 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:21:01 crc kubenswrapper[4768]: I1203 16:21:01.951295 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.136807 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.237516 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.239342 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.435275 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume\") pod \"ae0eb1d1-505d-4031-885d-652dea1526ea\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.435347 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b855p\" (UniqueName: \"kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p\") pod \"ae0eb1d1-505d-4031-885d-652dea1526ea\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.435392 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume\") pod \"ae0eb1d1-505d-4031-885d-652dea1526ea\" (UID: \"ae0eb1d1-505d-4031-885d-652dea1526ea\") " Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.436359 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume" (OuterVolumeSpecName: "config-volume") pod "ae0eb1d1-505d-4031-885d-652dea1526ea" (UID: "ae0eb1d1-505d-4031-885d-652dea1526ea"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.441629 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ae0eb1d1-505d-4031-885d-652dea1526ea" (UID: "ae0eb1d1-505d-4031-885d-652dea1526ea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.441722 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p" (OuterVolumeSpecName: "kube-api-access-b855p") pod "ae0eb1d1-505d-4031-885d-652dea1526ea" (UID: "ae0eb1d1-505d-4031-885d-652dea1526ea"). InnerVolumeSpecName "kube-api-access-b855p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.537101 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ae0eb1d1-505d-4031-885d-652dea1526ea-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.537150 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b855p\" (UniqueName: \"kubernetes.io/projected/ae0eb1d1-505d-4031-885d-652dea1526ea-kube-api-access-b855p\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.537163 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ae0eb1d1-505d-4031-885d-652dea1526ea-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.611261 4768 patch_prober.go:28] interesting pod/router-default-5444994796-428d6 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 03 16:21:02 crc kubenswrapper[4768]: [-]has-synced failed: reason withheld Dec 03 16:21:02 crc kubenswrapper[4768]: [+]process-running ok Dec 03 16:21:02 crc kubenswrapper[4768]: healthz check failed Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.611337 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-428d6" podUID="1a670660-f7f7-42af-91b1-b39b29b3d182" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.773070 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" event={"ID":"ae0eb1d1-505d-4031-885d-652dea1526ea","Type":"ContainerDied","Data":"91009abb10f15c1e5d48c0923400b335b2536a84f1cde532df693240d3a25e7f"} Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.773110 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.773118 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91009abb10f15c1e5d48c0923400b335b2536a84f1cde532df693240d3a25e7f" Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.775533 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerStarted","Data":"b9b691a53169a9501a52b2e0b7ee617a2375db900b15a46ff9e74ef02117b386"} Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.777210 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af7698ed-1b09-4969-b352-efe8df4b1cde","Type":"ContainerStarted","Data":"b3e0d556738b90c26c08ef8873432be2fa67cf727edc38581f24ae87b96845d0"} Dec 03 16:21:02 crc kubenswrapper[4768]: I1203 16:21:02.779396 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerStarted","Data":"92608616385aebe48ae747bbfc488c78932d98b6a742c8a9c8283ef2007ad1b8"} Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.608486 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.611170 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-428d6" Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.793801 4768 generic.go:334] "Generic (PLEG): container finished" podID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerID="9518b8d02f852c0919daa22a13b80e8c601fd17867c2e708c0c08a9c849e9de7" exitCode=0 Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.793872 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerDied","Data":"9518b8d02f852c0919daa22a13b80e8c601fd17867c2e708c0c08a9c849e9de7"} Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.795709 4768 generic.go:334] "Generic (PLEG): container finished" podID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerID="088d0a17fe9662af75729905993f88b607e60d4674250a5c81fca93811946dc2" exitCode=0 Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.795781 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerDied","Data":"088d0a17fe9662af75729905993f88b607e60d4674250a5c81fca93811946dc2"} Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.799467 4768 generic.go:334] "Generic (PLEG): container finished" podID="f033c93d-ce31-465a-8466-049ff04809ff" containerID="b3f6104c93d94461c95761f436c7ba89aa21c4dde84d5fbe8006bdddab8009c9" exitCode=0 Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.799507 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerDied","Data":"b3f6104c93d94461c95761f436c7ba89aa21c4dde84d5fbe8006bdddab8009c9"} Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.801556 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af7698ed-1b09-4969-b352-efe8df4b1cde","Type":"ContainerStarted","Data":"4d0599a14940136f4e2a2d99ed019d5b7af6b77653033df3231265389c3b0c22"} Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.803201 4768 generic.go:334] "Generic (PLEG): container finished" podID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerID="c5d071ab2ec5be3ad81387b5319ac32943aeb6444938d2b2eac6e86e86896198" exitCode=0 Dec 03 16:21:03 crc kubenswrapper[4768]: I1203 16:21:03.804217 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerDied","Data":"c5d071ab2ec5be3ad81387b5319ac32943aeb6444938d2b2eac6e86e86896198"} Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.812673 4768 generic.go:334] "Generic (PLEG): container finished" podID="af7698ed-1b09-4969-b352-efe8df4b1cde" containerID="4d0599a14940136f4e2a2d99ed019d5b7af6b77653033df3231265389c3b0c22" exitCode=0 Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.812782 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af7698ed-1b09-4969-b352-efe8df4b1cde","Type":"ContainerDied","Data":"4d0599a14940136f4e2a2d99ed019d5b7af6b77653033df3231265389c3b0c22"} Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.925435 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:21:04 crc kubenswrapper[4768]: E1203 16:21:04.926169 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0eb1d1-505d-4031-885d-652dea1526ea" containerName="collect-profiles" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.926262 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0eb1d1-505d-4031-885d-652dea1526ea" containerName="collect-profiles" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.926405 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0eb1d1-505d-4031-885d-652dea1526ea" containerName="collect-profiles" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.926826 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.929332 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.929774 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 03 16:21:04 crc kubenswrapper[4768]: I1203 16:21:04.942617 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.076415 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.076504 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.178007 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.178075 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.178537 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.197553 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:05 crc kubenswrapper[4768]: I1203 16:21:05.258656 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:07 crc kubenswrapper[4768]: I1203 16:21:07.076482 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-v24rm" Dec 03 16:21:09 crc kubenswrapper[4768]: I1203 16:21:09.855631 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:21:09 crc kubenswrapper[4768]: I1203 16:21:09.864765 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/772886ae-dcfc-418e-ac82-49d7844c99f1-metrics-certs\") pod \"network-metrics-daemon-j25k6\" (UID: \"772886ae-dcfc-418e-ac82-49d7844c99f1\") " pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:21:10 crc kubenswrapper[4768]: I1203 16:21:10.141327 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-ncgcs" Dec 03 16:21:10 crc kubenswrapper[4768]: I1203 16:21:10.157372 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-j25k6" Dec 03 16:21:11 crc kubenswrapper[4768]: I1203 16:21:11.360058 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:21:11 crc kubenswrapper[4768]: I1203 16:21:11.365048 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:21:19 crc kubenswrapper[4768]: I1203 16:21:19.115111 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:21:26 crc kubenswrapper[4768]: I1203 16:21:26.028383 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:21:26 crc kubenswrapper[4768]: I1203 16:21:26.029728 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:21:29 crc kubenswrapper[4768]: I1203 16:21:29.885414 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.163863 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.210240 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access\") pod \"af7698ed-1b09-4969-b352-efe8df4b1cde\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.210351 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir\") pod \"af7698ed-1b09-4969-b352-efe8df4b1cde\" (UID: \"af7698ed-1b09-4969-b352-efe8df4b1cde\") " Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.210768 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "af7698ed-1b09-4969-b352-efe8df4b1cde" (UID: "af7698ed-1b09-4969-b352-efe8df4b1cde"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.221292 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "af7698ed-1b09-4969-b352-efe8df4b1cde" (UID: "af7698ed-1b09-4969-b352-efe8df4b1cde"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.329131 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/af7698ed-1b09-4969-b352-efe8df4b1cde-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:30 crc kubenswrapper[4768]: I1203 16:21:30.329586 4768 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/af7698ed-1b09-4969-b352-efe8df4b1cde-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:31 crc kubenswrapper[4768]: I1203 16:21:31.001667 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"af7698ed-1b09-4969-b352-efe8df4b1cde","Type":"ContainerDied","Data":"b3e0d556738b90c26c08ef8873432be2fa67cf727edc38581f24ae87b96845d0"} Dec 03 16:21:31 crc kubenswrapper[4768]: I1203 16:21:31.001710 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3e0d556738b90c26c08ef8873432be2fa67cf727edc38581f24ae87b96845d0" Dec 03 16:21:31 crc kubenswrapper[4768]: I1203 16:21:31.001779 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 03 16:21:31 crc kubenswrapper[4768]: I1203 16:21:31.950943 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-f6csd" Dec 03 16:21:32 crc kubenswrapper[4768]: E1203 16:21:32.477377 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\": context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 03 16:21:32 crc kubenswrapper[4768]: E1203 16:21:32.477675 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9ghwv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-tk84c_openshift-marketplace(b2cff238-c110-484d-8a20-dc2ac6b132ec): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\": context canceled" logger="UnhandledError" Dec 03 16:21:32 crc kubenswrapper[4768]: E1203 16:21:32.478882 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894: Get \\\"https://registry.redhat.io/v2/redhat/redhat-marketplace-index/blobs/sha256:b45b4080e75db66dbb2f4d8403f29133c1829a6e7a5055752f4267aea3a23894\\\": context canceled\"" pod="openshift-marketplace/redhat-marketplace-tk84c" 
podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.333394 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:21:36 crc kubenswrapper[4768]: E1203 16:21:36.334312 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af7698ed-1b09-4969-b352-efe8df4b1cde" containerName="pruner" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.334332 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="af7698ed-1b09-4969-b352-efe8df4b1cde" containerName="pruner" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.334515 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="af7698ed-1b09-4969-b352-efe8df4b1cde" containerName="pruner" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.335480 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.354571 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.412408 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.412525 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.513899 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.513998 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.514119 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 16:21:36.548140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:36 crc kubenswrapper[4768]: I1203 
16:21:36.678982 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.527271 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.530939 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.545009 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.717307 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.717380 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.717937 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.818771 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.818861 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.818895 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.818905 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.819000 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir\") 
pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.842876 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:41 crc kubenswrapper[4768]: E1203 16:21:41.884637 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-tk84c" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" Dec 03 16:21:41 crc kubenswrapper[4768]: I1203 16:21:41.929505 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 03 16:21:42 crc kubenswrapper[4768]: I1203 16:21:42.137220 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 03 16:21:46 crc kubenswrapper[4768]: E1203 16:21:46.815247 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 16:21:46 crc kubenswrapper[4768]: E1203 16:21:46.816015 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-n7nt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-xjxcv_openshift-marketplace(3f74d79d-8026-4a3f-b910-d0245a90c975): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:46 crc kubenswrapper[4768]: E1203 16:21:46.817234 4768 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-xjxcv" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" Dec 03 16:21:49 crc kubenswrapper[4768]: E1203 16:21:49.032514 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 03 16:21:49 crc kubenswrapper[4768]: E1203 16:21:49.032749 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m8qsb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nt5hc_openshift-marketplace(107b8214-a1ff-4bee-96ea-4e3e9c176635): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:49 crc kubenswrapper[4768]: E1203 16:21:49.033952 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nt5hc" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" Dec 03 16:21:49 crc kubenswrapper[4768]: W1203 16:21:49.258351 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod70b844dc_9d79_41b5_a2f4_4da1d89ff0c6.slice/crio-a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd WatchSource:0}: Error finding container a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd: Status 404 returned error can't find the container with id a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd Dec 03 16:21:50 crc kubenswrapper[4768]: I1203 16:21:50.166623 4768 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6","Type":"ContainerStarted","Data":"a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd"} Dec 03 16:21:50 crc kubenswrapper[4768]: E1203 16:21:50.773914 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 16:21:50 crc kubenswrapper[4768]: E1203 16:21:50.774139 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hfzqt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dnd7v_openshift-marketplace(4de573e8-81fb-425f-ac06-4eed5a1f8e78): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:50 crc kubenswrapper[4768]: E1203 16:21:50.776051 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dnd7v" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.146989 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.147465 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qlzwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-ckrtd_openshift-marketplace(1f32e60f-f730-46ac-ab05-0ac46370495c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.148753 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-ckrtd" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.700103 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-xjxcv" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.700150 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-ckrtd" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.700122 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nt5hc" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" Dec 03 16:21:51 crc kubenswrapper[4768]: E1203 16:21:51.700284 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dnd7v" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" Dec 03 
16:21:54 crc kubenswrapper[4768]: E1203 16:21:54.861656 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 16:21:54 crc kubenswrapper[4768]: E1203 16:21:54.862365 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2rvfq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-tpql2_openshift-marketplace(b5ac3fee-d987-40bd-afd8-c85d8c0311e7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:54 crc kubenswrapper[4768]: E1203 16:21:54.863639 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-tpql2" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" Dec 03 16:21:55 crc kubenswrapper[4768]: E1203 16:21:55.009616 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 03 16:21:55 crc kubenswrapper[4768]: E1203 16:21:55.010210 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lgk96,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zx57v_openshift-marketplace(a8be6842-cdb8-4cb3-8df2-51eb10011545): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:21:55 crc kubenswrapper[4768]: E1203 16:21:55.017668 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-zx57v" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.121225 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.144185 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.195946 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e7260892-c835-4ebc-b4a6-261563972c83","Type":"ContainerStarted","Data":"beb835ed5381066316acdfdec9276904a02928760be8d2674ef9cf585b967c83"} Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.200290 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e428b7d-5e25-414b-8a31-f8507110a848","Type":"ContainerStarted","Data":"cf7bc84dbbc50532968cb3c6af13f88a293bed34ea4e0c1a66c310438d324926"} Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.202502 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerStarted","Data":"d171a94a51f66f1ec537c5fa7cecff2b56619ea9800974d2d986c6be5b09c2f4"} Dec 03 16:21:55 crc kubenswrapper[4768]: I1203 16:21:55.487631 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-j25k6"] Dec 03 16:21:55 crc kubenswrapper[4768]: W1203 16:21:55.491759 4768 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod772886ae_dcfc_418e_ac82_49d7844c99f1.slice/crio-6eabb228e05d115f381831c6b55856f96fd163be7af67280bb9570114c1fd3bc WatchSource:0}: Error finding container 6eabb228e05d115f381831c6b55856f96fd163be7af67280bb9570114c1fd3bc: Status 404 returned error can't find the container with id 6eabb228e05d115f381831c6b55856f96fd163be7af67280bb9570114c1fd3bc Dec 03 16:21:55 crc kubenswrapper[4768]: E1203 16:21:55.828688 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2cff238_c110_484d_8a20_dc2ac6b132ec.slice/crio-conmon-c815ed808fe1c5f8a9e8542112dfb944b5a50afc4940883763cd2ee3c3428533.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb2cff238_c110_484d_8a20_dc2ac6b132ec.slice/crio-c815ed808fe1c5f8a9e8542112dfb944b5a50afc4940883763cd2ee3c3428533.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.028341 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.028610 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.028695 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.029401 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.029528 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099" gracePeriod=600 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.219477 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j25k6" event={"ID":"772886ae-dcfc-418e-ac82-49d7844c99f1","Type":"ContainerStarted","Data":"a2d6fbc0290c5018ec72c73e41860c3065689f37ea1937461ae9f26363366bb0"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.219533 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j25k6" event={"ID":"772886ae-dcfc-418e-ac82-49d7844c99f1","Type":"ContainerStarted","Data":"2b7c4ba6e9c4082dea43c7248135a83e856d9ba6f3ca7ef30a9716af32ee278a"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 
16:21:56.219548 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-j25k6" event={"ID":"772886ae-dcfc-418e-ac82-49d7844c99f1","Type":"ContainerStarted","Data":"6eabb228e05d115f381831c6b55856f96fd163be7af67280bb9570114c1fd3bc"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.222586 4768 generic.go:334] "Generic (PLEG): container finished" podID="e7260892-c835-4ebc-b4a6-261563972c83" containerID="4b9adf2c756719ab59baae62510022601d297fa07267b543df913e5ef8146f0c" exitCode=0 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.222709 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e7260892-c835-4ebc-b4a6-261563972c83","Type":"ContainerDied","Data":"4b9adf2c756719ab59baae62510022601d297fa07267b543df913e5ef8146f0c"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.226107 4768 generic.go:334] "Generic (PLEG): container finished" podID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerID="c815ed808fe1c5f8a9e8542112dfb944b5a50afc4940883763cd2ee3c3428533" exitCode=0 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.226174 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerDied","Data":"c815ed808fe1c5f8a9e8542112dfb944b5a50afc4940883763cd2ee3c3428533"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.228667 4768 generic.go:334] "Generic (PLEG): container finished" podID="70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" containerID="0ababd054cadccfab3ab82fe5ae00d97234c68f8f7104cef9d465cd0ab433f1b" exitCode=0 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.228734 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6","Type":"ContainerDied","Data":"0ababd054cadccfab3ab82fe5ae00d97234c68f8f7104cef9d465cd0ab433f1b"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.240138 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-j25k6" podStartSLOduration=190.2401102 podStartE2EDuration="3m10.2401102s" podCreationTimestamp="2025-12-03 16:18:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:21:56.237085787 +0000 UTC m=+213.156422230" watchObservedRunningTime="2025-12-03 16:21:56.2401102 +0000 UTC m=+213.159446633" Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.246141 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099" exitCode=0 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.246227 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.251776 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e428b7d-5e25-414b-8a31-f8507110a848","Type":"ContainerStarted","Data":"cf63aed412961625cb21441edc292325ba78af017db15941c05af524b404c954"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.258893 4768 
generic.go:334] "Generic (PLEG): container finished" podID="f033c93d-ce31-465a-8466-049ff04809ff" containerID="d171a94a51f66f1ec537c5fa7cecff2b56619ea9800974d2d986c6be5b09c2f4" exitCode=0 Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.258944 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerDied","Data":"d171a94a51f66f1ec537c5fa7cecff2b56619ea9800974d2d986c6be5b09c2f4"} Dec 03 16:21:56 crc kubenswrapper[4768]: I1203 16:21:56.331927 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=15.33190487 podStartE2EDuration="15.33190487s" podCreationTimestamp="2025-12-03 16:21:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:21:56.331344734 +0000 UTC m=+213.250681157" watchObservedRunningTime="2025-12-03 16:21:56.33190487 +0000 UTC m=+213.251241293" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.269945 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6"} Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.512251 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.521255 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.576378 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir\") pod \"e7260892-c835-4ebc-b4a6-261563972c83\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.576439 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir\") pod \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.576706 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" (UID: "70b844dc-9d79-41b5-a2f4-4da1d89ff0c6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.576794 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e7260892-c835-4ebc-b4a6-261563972c83" (UID: "e7260892-c835-4ebc-b4a6-261563972c83"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.681305 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access\") pod \"e7260892-c835-4ebc-b4a6-261563972c83\" (UID: \"e7260892-c835-4ebc-b4a6-261563972c83\") " Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.681398 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access\") pod \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\" (UID: \"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6\") " Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.681695 4768 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e7260892-c835-4ebc-b4a6-261563972c83-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.681718 4768 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.687298 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" (UID: "70b844dc-9d79-41b5-a2f4-4da1d89ff0c6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.687923 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7260892-c835-4ebc-b4a6-261563972c83" (UID: "e7260892-c835-4ebc-b4a6-261563972c83"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.783073 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7260892-c835-4ebc-b4a6-261563972c83-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:57 crc kubenswrapper[4768]: I1203 16:21:57.783132 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/70b844dc-9d79-41b5-a2f4-4da1d89ff0c6-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.276686 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerStarted","Data":"080e86850cc79c429b494e285e4c7f940a36b0f0480e020fe8456ec33f04db2a"} Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.278824 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.278865 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e7260892-c835-4ebc-b4a6-261563972c83","Type":"ContainerDied","Data":"beb835ed5381066316acdfdec9276904a02928760be8d2674ef9cf585b967c83"} Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.279318 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="beb835ed5381066316acdfdec9276904a02928760be8d2674ef9cf585b967c83" Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.281050 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerStarted","Data":"340ca7cdbc33ad74ee0db7b288501de6f515b0daabfcf9d08686811c5b59cb32"} Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.282928 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"70b844dc-9d79-41b5-a2f4-4da1d89ff0c6","Type":"ContainerDied","Data":"a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd"} Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.283008 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a22a6f694ea5a1b32383fd33aa273c5a6c660f78d427b29a9ff963ce30ab7edd" Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.282946 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 03 16:21:58 crc kubenswrapper[4768]: I1203 16:21:58.303568 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-5xt59" podStartSLOduration=5.93942921 podStartE2EDuration="58.303552262s" podCreationTimestamp="2025-12-03 16:21:00 +0000 UTC" firstStartedPulling="2025-12-03 16:21:04.815989407 +0000 UTC m=+161.735325830" lastFinishedPulling="2025-12-03 16:21:57.180112459 +0000 UTC m=+214.099448882" observedRunningTime="2025-12-03 16:21:58.298429371 +0000 UTC m=+215.217765794" watchObservedRunningTime="2025-12-03 16:21:58.303552262 +0000 UTC m=+215.222888695" Dec 03 16:22:00 crc kubenswrapper[4768]: I1203 16:22:00.695691 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:22:00 crc kubenswrapper[4768]: I1203 16:22:00.696031 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:22:01 crc kubenswrapper[4768]: I1203 16:22:01.090262 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:22:01 crc kubenswrapper[4768]: I1203 16:22:01.110260 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tk84c" podStartSLOduration=7.74307951 podStartE2EDuration="1m1.11023667s" podCreationTimestamp="2025-12-03 16:21:00 +0000 UTC" firstStartedPulling="2025-12-03 16:21:03.806323897 +0000 UTC m=+160.725660320" lastFinishedPulling="2025-12-03 16:21:57.173481057 +0000 UTC m=+214.092817480" observedRunningTime="2025-12-03 16:21:58.319035697 +0000 UTC m=+215.238372120" watchObservedRunningTime="2025-12-03 16:22:01.11023667 +0000 UTC m=+218.029573093" Dec 03 16:22:01 crc kubenswrapper[4768]: I1203 
16:22:01.111755 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:01 crc kubenswrapper[4768]: I1203 16:22:01.111853 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:01 crc kubenswrapper[4768]: I1203 16:22:01.164869 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:05 crc kubenswrapper[4768]: I1203 16:22:05.336890 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerStarted","Data":"ebdc38559f2abfadd65b1e05bf3bcd968840ed766c9b80b4e58c88f6ffbbd102"} Dec 03 16:22:06 crc kubenswrapper[4768]: I1203 16:22:06.344296 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerStarted","Data":"878f95335232cd8652b3c05d03f74790ae4aedba8ac2376816f719d640e8ffc5"} Dec 03 16:22:06 crc kubenswrapper[4768]: I1203 16:22:06.345941 4768 generic.go:334] "Generic (PLEG): container finished" podID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerID="ebdc38559f2abfadd65b1e05bf3bcd968840ed766c9b80b4e58c88f6ffbbd102" exitCode=0 Dec 03 16:22:06 crc kubenswrapper[4768]: I1203 16:22:06.345990 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerDied","Data":"ebdc38559f2abfadd65b1e05bf3bcd968840ed766c9b80b4e58c88f6ffbbd102"} Dec 03 16:22:07 crc kubenswrapper[4768]: I1203 16:22:07.353987 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerStarted","Data":"93ea4a973604af99409a6a1b3961fa3c530d8f98692f161460f07a1a72f19a18"} Dec 03 16:22:07 crc kubenswrapper[4768]: I1203 16:22:07.357766 4768 generic.go:334] "Generic (PLEG): container finished" podID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerID="878f95335232cd8652b3c05d03f74790ae4aedba8ac2376816f719d640e8ffc5" exitCode=0 Dec 03 16:22:07 crc kubenswrapper[4768]: I1203 16:22:07.357802 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerDied","Data":"878f95335232cd8652b3c05d03f74790ae4aedba8ac2376816f719d640e8ffc5"} Dec 03 16:22:07 crc kubenswrapper[4768]: I1203 16:22:07.390019 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nt5hc" podStartSLOduration=2.317373058 podStartE2EDuration="1m9.389998856s" podCreationTimestamp="2025-12-03 16:20:58 +0000 UTC" firstStartedPulling="2025-12-03 16:20:59.705864815 +0000 UTC m=+156.625201238" lastFinishedPulling="2025-12-03 16:22:06.778490613 +0000 UTC m=+223.697827036" observedRunningTime="2025-12-03 16:22:07.371650653 +0000 UTC m=+224.290987096" watchObservedRunningTime="2025-12-03 16:22:07.389998856 +0000 UTC m=+224.309335279" Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.365078 4768 generic.go:334] "Generic (PLEG): container finished" podID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerID="bd2a7454ed23e0e5c76596c39824765d899e47646c0c8a59a2138738561768ef" exitCode=0 Dec 03 16:22:08 crc 
kubenswrapper[4768]: I1203 16:22:08.365186 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerDied","Data":"bd2a7454ed23e0e5c76596c39824765d899e47646c0c8a59a2138738561768ef"} Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.368885 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerStarted","Data":"d5a34714a9993d6c104a4b3ba855afd1f63af69477e1f04f76fdb780e31bec3c"} Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.374117 4768 generic.go:334] "Generic (PLEG): container finished" podID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerID="beb8d82e5b5a854ed9d9ee12971d9ae336123c48eca7c79557c1b0ac9d637d5d" exitCode=0 Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.374165 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerDied","Data":"beb8d82e5b5a854ed9d9ee12971d9ae336123c48eca7c79557c1b0ac9d637d5d"} Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.377870 4768 generic.go:334] "Generic (PLEG): container finished" podID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerID="f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e" exitCode=0 Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.377910 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerDied","Data":"f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e"} Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.444208 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xjxcv" podStartSLOduration=2.387620156 podStartE2EDuration="1m10.444193049s" podCreationTimestamp="2025-12-03 16:20:58 +0000 UTC" firstStartedPulling="2025-12-03 16:20:59.705717951 +0000 UTC m=+156.625054374" lastFinishedPulling="2025-12-03 16:22:07.762290844 +0000 UTC m=+224.681627267" observedRunningTime="2025-12-03 16:22:08.441570897 +0000 UTC m=+225.360907360" watchObservedRunningTime="2025-12-03 16:22:08.444193049 +0000 UTC m=+225.363529472" Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.658000 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.658080 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:22:08 crc kubenswrapper[4768]: I1203 16:22:08.697602 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.105567 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.106000 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.385306 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" 
event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerStarted","Data":"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8"} Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.387466 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerStarted","Data":"c64cb7ab4645e56eabbb4195f3fd67fd275d85b094e895958c25fd8ad295f619"} Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.389539 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerStarted","Data":"7d06d05bc1476a976b76c7d881f0d610df8c44db84471be2cc31bf707dac9319"} Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.410803 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ckrtd" podStartSLOduration=2.274667682 podStartE2EDuration="1m11.410785027s" podCreationTimestamp="2025-12-03 16:20:58 +0000 UTC" firstStartedPulling="2025-12-03 16:20:59.703309191 +0000 UTC m=+156.622645614" lastFinishedPulling="2025-12-03 16:22:08.839426536 +0000 UTC m=+225.758762959" observedRunningTime="2025-12-03 16:22:09.408066222 +0000 UTC m=+226.327402655" watchObservedRunningTime="2025-12-03 16:22:09.410785027 +0000 UTC m=+226.330121450" Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.430296 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zx57v" podStartSLOduration=4.478680099 podStartE2EDuration="1m8.430276592s" podCreationTimestamp="2025-12-03 16:21:01 +0000 UTC" firstStartedPulling="2025-12-03 16:21:04.820031932 +0000 UTC m=+161.739368355" lastFinishedPulling="2025-12-03 16:22:08.771628425 +0000 UTC m=+225.690964848" observedRunningTime="2025-12-03 16:22:09.426317143 +0000 UTC m=+226.345653576" watchObservedRunningTime="2025-12-03 16:22:09.430276592 +0000 UTC m=+226.349613035" Dec 03 16:22:09 crc kubenswrapper[4768]: I1203 16:22:09.455805 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dnd7v" podStartSLOduration=2.337740174 podStartE2EDuration="1m11.455766791s" podCreationTimestamp="2025-12-03 16:20:58 +0000 UTC" firstStartedPulling="2025-12-03 16:20:59.714040207 +0000 UTC m=+156.633376630" lastFinishedPulling="2025-12-03 16:22:08.832066824 +0000 UTC m=+225.751403247" observedRunningTime="2025-12-03 16:22:09.452215444 +0000 UTC m=+226.371551867" watchObservedRunningTime="2025-12-03 16:22:09.455766791 +0000 UTC m=+226.375103214" Dec 03 16:22:10 crc kubenswrapper[4768]: I1203 16:22:10.145042 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-xjxcv" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="registry-server" probeResult="failure" output=< Dec 03 16:22:10 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 16:22:10 crc kubenswrapper[4768]: > Dec 03 16:22:10 crc kubenswrapper[4768]: I1203 16:22:10.736317 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:22:11 crc kubenswrapper[4768]: I1203 16:22:11.158352 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:11 crc kubenswrapper[4768]: I1203 
16:22:11.867232 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:11 crc kubenswrapper[4768]: I1203 16:22:11.867287 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:12 crc kubenswrapper[4768]: I1203 16:22:12.906062 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zx57v" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="registry-server" probeResult="failure" output=< Dec 03 16:22:12 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 16:22:12 crc kubenswrapper[4768]: > Dec 03 16:22:14 crc kubenswrapper[4768]: I1203 16:22:14.909636 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:22:14 crc kubenswrapper[4768]: I1203 16:22:14.910136 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tk84c" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="registry-server" containerID="cri-o://340ca7cdbc33ad74ee0db7b288501de6f515b0daabfcf9d08686811c5b59cb32" gracePeriod=2 Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.430409 4768 generic.go:334] "Generic (PLEG): container finished" podID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerID="340ca7cdbc33ad74ee0db7b288501de6f515b0daabfcf9d08686811c5b59cb32" exitCode=0 Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.430485 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerDied","Data":"340ca7cdbc33ad74ee0db7b288501de6f515b0daabfcf9d08686811c5b59cb32"} Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.556241 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.648331 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ghwv\" (UniqueName: \"kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv\") pod \"b2cff238-c110-484d-8a20-dc2ac6b132ec\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.648435 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities\") pod \"b2cff238-c110-484d-8a20-dc2ac6b132ec\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.648490 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content\") pod \"b2cff238-c110-484d-8a20-dc2ac6b132ec\" (UID: \"b2cff238-c110-484d-8a20-dc2ac6b132ec\") " Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.650354 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities" (OuterVolumeSpecName: "utilities") pod "b2cff238-c110-484d-8a20-dc2ac6b132ec" (UID: "b2cff238-c110-484d-8a20-dc2ac6b132ec"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.650915 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.659794 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv" (OuterVolumeSpecName: "kube-api-access-9ghwv") pod "b2cff238-c110-484d-8a20-dc2ac6b132ec" (UID: "b2cff238-c110-484d-8a20-dc2ac6b132ec"). InnerVolumeSpecName "kube-api-access-9ghwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.666742 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b2cff238-c110-484d-8a20-dc2ac6b132ec" (UID: "b2cff238-c110-484d-8a20-dc2ac6b132ec"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.753095 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ghwv\" (UniqueName: \"kubernetes.io/projected/b2cff238-c110-484d-8a20-dc2ac6b132ec-kube-api-access-9ghwv\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:16 crc kubenswrapper[4768]: I1203 16:22:16.753163 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b2cff238-c110-484d-8a20-dc2ac6b132ec-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.436974 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tk84c" event={"ID":"b2cff238-c110-484d-8a20-dc2ac6b132ec","Type":"ContainerDied","Data":"afbfd5b606024f3fbbe57109c8620507c39b089aaa648d8298ca270f3012ff10"} Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.437039 4768 scope.go:117] "RemoveContainer" containerID="340ca7cdbc33ad74ee0db7b288501de6f515b0daabfcf9d08686811c5b59cb32" Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.437135 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tk84c" Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.448962 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerStarted","Data":"825e3e5d06919255cbc3639790ba2655bdef4689cd4643da1ca0c653c11dbc43"} Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.465825 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.475537 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tk84c"] Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.475743 4768 scope.go:117] "RemoveContainer" containerID="c815ed808fe1c5f8a9e8542112dfb944b5a50afc4940883763cd2ee3c3428533" Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.975612 4768 scope.go:117] "RemoveContainer" containerID="c5d071ab2ec5be3ad81387b5319ac32943aeb6444938d2b2eac6e86e86896198" Dec 03 16:22:17 crc kubenswrapper[4768]: I1203 16:22:17.996440 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" path="/var/lib/kubelet/pods/b2cff238-c110-484d-8a20-dc2ac6b132ec/volumes" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.512013 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.513356 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.588770 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.729965 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.920637 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.920703 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:18 crc kubenswrapper[4768]: I1203 16:22:18.979581 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.156777 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.224195 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.472615 4768 generic.go:334] "Generic (PLEG): container finished" podID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerID="825e3e5d06919255cbc3639790ba2655bdef4689cd4643da1ca0c653c11dbc43" exitCode=0 Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.473364 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" 
event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerDied","Data":"825e3e5d06919255cbc3639790ba2655bdef4689cd4643da1ca0c653c11dbc43"} Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.516980 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:19 crc kubenswrapper[4768]: I1203 16:22:19.552920 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:22:21 crc kubenswrapper[4768]: I1203 16:22:21.317252 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:22:21 crc kubenswrapper[4768]: I1203 16:22:21.486128 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ckrtd" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="registry-server" containerID="cri-o://cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8" gracePeriod=2 Dec 03 16:22:21 crc kubenswrapper[4768]: I1203 16:22:21.924022 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:21 crc kubenswrapper[4768]: I1203 16:22:21.992525 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:22 crc kubenswrapper[4768]: I1203 16:22:22.421933 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29lwk"] Dec 03 16:22:22 crc kubenswrapper[4768]: I1203 16:22:22.709962 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:22:22 crc kubenswrapper[4768]: I1203 16:22:22.710214 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xjxcv" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="registry-server" containerID="cri-o://d5a34714a9993d6c104a4b3ba855afd1f63af69477e1f04f76fdb780e31bec3c" gracePeriod=2 Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.116192 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.253135 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content\") pod \"1f32e60f-f730-46ac-ab05-0ac46370495c\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.253219 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities\") pod \"1f32e60f-f730-46ac-ab05-0ac46370495c\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.253520 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlzwj\" (UniqueName: \"kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj\") pod \"1f32e60f-f730-46ac-ab05-0ac46370495c\" (UID: \"1f32e60f-f730-46ac-ab05-0ac46370495c\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.254816 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities" (OuterVolumeSpecName: "utilities") pod "1f32e60f-f730-46ac-ab05-0ac46370495c" (UID: "1f32e60f-f730-46ac-ab05-0ac46370495c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.259714 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj" (OuterVolumeSpecName: "kube-api-access-qlzwj") pod "1f32e60f-f730-46ac-ab05-0ac46370495c" (UID: "1f32e60f-f730-46ac-ab05-0ac46370495c"). InnerVolumeSpecName "kube-api-access-qlzwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.312006 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1f32e60f-f730-46ac-ab05-0ac46370495c" (UID: "1f32e60f-f730-46ac-ab05-0ac46370495c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.355447 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlzwj\" (UniqueName: \"kubernetes.io/projected/1f32e60f-f730-46ac-ab05-0ac46370495c-kube-api-access-qlzwj\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.355482 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.355491 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1f32e60f-f730-46ac-ab05-0ac46370495c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.498870 4768 generic.go:334] "Generic (PLEG): container finished" podID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerID="cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8" exitCode=0 Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.498958 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerDied","Data":"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8"} Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.498970 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ckrtd" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.499747 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ckrtd" event={"ID":"1f32e60f-f730-46ac-ab05-0ac46370495c","Type":"ContainerDied","Data":"3179b86dd2db0ec3e3783331c46d2bf0d6e5db5510f7459fc8ad83a3f0244600"} Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.499821 4768 scope.go:117] "RemoveContainer" containerID="cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.502240 4768 generic.go:334] "Generic (PLEG): container finished" podID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerID="d5a34714a9993d6c104a4b3ba855afd1f63af69477e1f04f76fdb780e31bec3c" exitCode=0 Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.502274 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerDied","Data":"d5a34714a9993d6c104a4b3ba855afd1f63af69477e1f04f76fdb780e31bec3c"} Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.515377 4768 scope.go:117] "RemoveContainer" containerID="f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.529991 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.538882 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ckrtd"] Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.724872 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.725781 4768 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-zx57v" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="registry-server" containerID="cri-o://c64cb7ab4645e56eabbb4195f3fd67fd275d85b094e895958c25fd8ad295f619" gracePeriod=2 Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.772801 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.860694 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities\") pod \"3f74d79d-8026-4a3f-b910-d0245a90c975\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.860872 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content\") pod \"3f74d79d-8026-4a3f-b910-d0245a90c975\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.860988 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7nt5\" (UniqueName: \"kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5\") pod \"3f74d79d-8026-4a3f-b910-d0245a90c975\" (UID: \"3f74d79d-8026-4a3f-b910-d0245a90c975\") " Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.865500 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities" (OuterVolumeSpecName: "utilities") pod "3f74d79d-8026-4a3f-b910-d0245a90c975" (UID: "3f74d79d-8026-4a3f-b910-d0245a90c975"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.866215 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5" (OuterVolumeSpecName: "kube-api-access-n7nt5") pod "3f74d79d-8026-4a3f-b910-d0245a90c975" (UID: "3f74d79d-8026-4a3f-b910-d0245a90c975"). InnerVolumeSpecName "kube-api-access-n7nt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.922509 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3f74d79d-8026-4a3f-b910-d0245a90c975" (UID: "3f74d79d-8026-4a3f-b910-d0245a90c975"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.962759 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.962798 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7nt5\" (UniqueName: \"kubernetes.io/projected/3f74d79d-8026-4a3f-b910-d0245a90c975-kube-api-access-n7nt5\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:23 crc kubenswrapper[4768]: I1203 16:22:23.962809 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f74d79d-8026-4a3f-b910-d0245a90c975-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:24 crc kubenswrapper[4768]: I1203 16:22:24.519124 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xjxcv" event={"ID":"3f74d79d-8026-4a3f-b910-d0245a90c975","Type":"ContainerDied","Data":"1ac3fdbb85ce0f8195e828166558f0c7ddc10230847417f50e32a1150468a347"} Dec 03 16:22:24 crc kubenswrapper[4768]: I1203 16:22:24.519139 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xjxcv" Dec 03 16:22:24 crc kubenswrapper[4768]: I1203 16:22:24.556018 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:22:24 crc kubenswrapper[4768]: I1203 16:22:24.558560 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xjxcv"] Dec 03 16:22:24 crc kubenswrapper[4768]: I1203 16:22:24.682158 4768 scope.go:117] "RemoveContainer" containerID="c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.280458 4768 scope.go:117] "RemoveContainer" containerID="cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8" Dec 03 16:22:25 crc kubenswrapper[4768]: E1203 16:22:25.281095 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8\": container with ID starting with cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8 not found: ID does not exist" containerID="cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.281124 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8"} err="failed to get container status \"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8\": rpc error: code = NotFound desc = could not find container \"cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8\": container with ID starting with cbd4a828bdf2c4477efa599a4ba22c6754c51d06d9269ad19fe15698dfd2e2a8 not found: ID does not exist" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.281149 4768 scope.go:117] "RemoveContainer" containerID="f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e" Dec 03 16:22:25 crc kubenswrapper[4768]: E1203 16:22:25.281631 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e\": container with ID starting with f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e not found: ID does not exist" containerID="f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.281815 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e"} err="failed to get container status \"f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e\": rpc error: code = NotFound desc = could not find container \"f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e\": container with ID starting with f52df19c191508559dc6922a2343eb46b3c98e1a46747fd8fd2646f3ab3f2a1e not found: ID does not exist" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.281934 4768 scope.go:117] "RemoveContainer" containerID="c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770" Dec 03 16:22:25 crc kubenswrapper[4768]: E1203 16:22:25.283369 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770\": container with ID starting with c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770 not found: ID does not exist" containerID="c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.283405 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770"} err="failed to get container status \"c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770\": rpc error: code = NotFound desc = could not find container \"c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770\": container with ID starting with c74fb619a57cbc25a17d7f394b7cec0d29f88cf2b0b9fa77b2ff662aa3bc7770 not found: ID does not exist" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.283428 4768 scope.go:117] "RemoveContainer" containerID="d5a34714a9993d6c104a4b3ba855afd1f63af69477e1f04f76fdb780e31bec3c" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.547659 4768 generic.go:334] "Generic (PLEG): container finished" podID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerID="c64cb7ab4645e56eabbb4195f3fd67fd275d85b094e895958c25fd8ad295f619" exitCode=0 Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.552855 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" path="/var/lib/kubelet/pods/1f32e60f-f730-46ac-ab05-0ac46370495c/volumes" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.553525 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" path="/var/lib/kubelet/pods/3f74d79d-8026-4a3f-b910-d0245a90c975/volumes" Dec 03 16:22:25 crc kubenswrapper[4768]: I1203 16:22:25.554064 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerDied","Data":"c64cb7ab4645e56eabbb4195f3fd67fd275d85b094e895958c25fd8ad295f619"} Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.034536 4768 scope.go:117] "RemoveContainer" containerID="878f95335232cd8652b3c05d03f74790ae4aedba8ac2376816f719d640e8ffc5" Dec 03 16:22:28 
crc kubenswrapper[4768]: I1203 16:22:28.103279 4768 scope.go:117] "RemoveContainer" containerID="5e748e33a8e7bed9ee479b1895a711d8c003e9ae0ec55e1e1c0a21c3ef6910b1" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.116186 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.220201 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content\") pod \"a8be6842-cdb8-4cb3-8df2-51eb10011545\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.220277 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgk96\" (UniqueName: \"kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96\") pod \"a8be6842-cdb8-4cb3-8df2-51eb10011545\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.220348 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities\") pod \"a8be6842-cdb8-4cb3-8df2-51eb10011545\" (UID: \"a8be6842-cdb8-4cb3-8df2-51eb10011545\") " Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.221315 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities" (OuterVolumeSpecName: "utilities") pod "a8be6842-cdb8-4cb3-8df2-51eb10011545" (UID: "a8be6842-cdb8-4cb3-8df2-51eb10011545"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.230485 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96" (OuterVolumeSpecName: "kube-api-access-lgk96") pod "a8be6842-cdb8-4cb3-8df2-51eb10011545" (UID: "a8be6842-cdb8-4cb3-8df2-51eb10011545"). InnerVolumeSpecName "kube-api-access-lgk96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.322393 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgk96\" (UniqueName: \"kubernetes.io/projected/a8be6842-cdb8-4cb3-8df2-51eb10011545-kube-api-access-lgk96\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.322729 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.327484 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8be6842-cdb8-4cb3-8df2-51eb10011545" (UID: "a8be6842-cdb8-4cb3-8df2-51eb10011545"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.423523 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8be6842-cdb8-4cb3-8df2-51eb10011545-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.566509 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zx57v" event={"ID":"a8be6842-cdb8-4cb3-8df2-51eb10011545","Type":"ContainerDied","Data":"b9b691a53169a9501a52b2e0b7ee617a2375db900b15a46ff9e74ef02117b386"} Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.566580 4768 scope.go:117] "RemoveContainer" containerID="c64cb7ab4645e56eabbb4195f3fd67fd275d85b094e895958c25fd8ad295f619" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.566634 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zx57v" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.584816 4768 scope.go:117] "RemoveContainer" containerID="bd2a7454ed23e0e5c76596c39824765d899e47646c0c8a59a2138738561768ef" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.607359 4768 scope.go:117] "RemoveContainer" containerID="088d0a17fe9662af75729905993f88b607e60d4674250a5c81fca93811946dc2" Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.613706 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:22:28 crc kubenswrapper[4768]: I1203 16:22:28.617332 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zx57v"] Dec 03 16:22:29 crc kubenswrapper[4768]: I1203 16:22:29.550179 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" path="/var/lib/kubelet/pods/a8be6842-cdb8-4cb3-8df2-51eb10011545/volumes" Dec 03 16:22:29 crc kubenswrapper[4768]: I1203 16:22:29.575221 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerStarted","Data":"27dd9452f96f6c2b902ebd487915dc2da7b81bb82a507051b688871cd90a25b9"} Dec 03 16:22:31 crc kubenswrapper[4768]: I1203 16:22:31.651609 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:22:31 crc kubenswrapper[4768]: I1203 16:22:31.651934 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:22:32 crc kubenswrapper[4768]: I1203 16:22:32.713809 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-tpql2" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="registry-server" probeResult="failure" output=< Dec 03 16:22:32 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 16:22:32 crc kubenswrapper[4768]: > Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275506 4768 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275764 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275780 4768 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275791 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275798 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275807 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275813 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275820 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275826 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275839 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275845 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275852 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275858 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275867 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275873 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275886 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7260892-c835-4ebc-b4a6-261563972c83" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275892 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7260892-c835-4ebc-b4a6-261563972c83" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275900 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275906 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275916 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275922 4768 
state_mem.go:107] "Deleted CPUSet assignment" podUID="70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275930 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275937 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="extract-utilities" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275948 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275955 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275962 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275968 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.275975 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.275981 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="extract-content" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276071 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7260892-c835-4ebc-b4a6-261563972c83" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276083 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f74d79d-8026-4a3f-b910-d0245a90c975" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276092 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2cff238-c110-484d-8a20-dc2ac6b132ec" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276098 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8be6842-cdb8-4cb3-8df2-51eb10011545" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276108 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f32e60f-f730-46ac-ab05-0ac46370495c" containerName="registry-server" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276116 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b844dc-9d79-41b5-a2f4-4da1d89ff0c6" containerName="pruner" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.276448 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.300455 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tpql2" podStartSLOduration=9.074301208 podStartE2EDuration="1m32.300439953s" podCreationTimestamp="2025-12-03 16:21:01 +0000 UTC" firstStartedPulling="2025-12-03 16:21:04.815547473 +0000 UTC m=+161.734883886" lastFinishedPulling="2025-12-03 16:22:28.041686208 +0000 UTC m=+244.961022631" observedRunningTime="2025-12-03 16:22:29.599486141 +0000 UTC m=+246.518822604" watchObservedRunningTime="2025-12-03 16:22:33.300439953 +0000 UTC m=+250.219776376" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.301170 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.325985 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.326076 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.326139 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.326221 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.326257 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.338317 4768 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.338615 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41" gracePeriod=15 Dec 03 16:22:33 
crc kubenswrapper[4768]: I1203 16:22:33.338654 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde" gracePeriod=15
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.338680 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2" gracePeriod=15
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.338731 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb" gracePeriod=15
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.338721 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929" gracePeriod=15
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339673 4768 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339859 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339870 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339881 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339887 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339895 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339901 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339910 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339915 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339922 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339927 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.339936 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.339941 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.340035 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.340044 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.340054 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.340067 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.340075 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.426847 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.426899 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.426926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.426949 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.426969 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427006 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427010 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427021 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427071 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427107 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427112 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427125 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.427259 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.527949 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.528044 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.528074 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.528143 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.528189 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.528221 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.534291 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.534649 4768 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:33 crc kubenswrapper[4768]: I1203 16:22:33.599656 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:22:33 crc kubenswrapper[4768]: W1203 16:22:33.621994 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-d00005ac9168f4860d9b7815d7ebab63c94505e26be27a2af91384b124e818e6 WatchSource:0}: Error finding container d00005ac9168f4860d9b7815d7ebab63c94505e26be27a2af91384b124e818e6: Status 404 returned error can't find the container with id d00005ac9168f4860d9b7815d7ebab63c94505e26be27a2af91384b124e818e6
Dec 03 16:22:33 crc kubenswrapper[4768]: E1203 16:22:33.625887 4768 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dc11bade4fe5d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,LastTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.601688 4768 generic.go:334] "Generic (PLEG): container finished" podID="5e428b7d-5e25-414b-8a31-f8507110a848" containerID="cf63aed412961625cb21441edc292325ba78af017db15941c05af524b404c954" exitCode=0
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.601810 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e428b7d-5e25-414b-8a31-f8507110a848","Type":"ContainerDied","Data":"cf63aed412961625cb21441edc292325ba78af017db15941c05af524b404c954"}
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.602468 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.603019 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.607224 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.608366 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde" exitCode=0
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.608412 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929" exitCode=0
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.608429 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2" exitCode=0
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.608447 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb" exitCode=2
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.610812 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca"}
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.610865 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"d00005ac9168f4860d9b7815d7ebab63c94505e26be27a2af91384b124e818e6"}
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.612559 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:34 crc kubenswrapper[4768]: I1203 16:22:34.613032 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.755795 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.756863 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.757544 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.758338 4768 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.758614 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811460 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811540 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811631 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811696 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811818 4768 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.811829 4768 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.895186 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.895835 4768 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.896141 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.896440 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.912880 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.913154 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.913311 4768 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.947380 4768 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.947839 4768 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.948265 4768 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.948760 4768 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.949239 4768 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:35 crc kubenswrapper[4768]: I1203 16:22:35.949286 4768 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 03 16:22:35 crc kubenswrapper[4768]: E1203 16:22:35.949588 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="200ms"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.014023 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir\") pod \"5e428b7d-5e25-414b-8a31-f8507110a848\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") "
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.014176 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock\") pod \"5e428b7d-5e25-414b-8a31-f8507110a848\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") "
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.014229 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access\") pod \"5e428b7d-5e25-414b-8a31-f8507110a848\" (UID: \"5e428b7d-5e25-414b-8a31-f8507110a848\") "
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.014587 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5e428b7d-5e25-414b-8a31-f8507110a848" (UID: "5e428b7d-5e25-414b-8a31-f8507110a848"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.014688 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock" (OuterVolumeSpecName: "var-lock") pod "5e428b7d-5e25-414b-8a31-f8507110a848" (UID: "5e428b7d-5e25-414b-8a31-f8507110a848"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.023014 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5e428b7d-5e25-414b-8a31-f8507110a848" (UID: "5e428b7d-5e25-414b-8a31-f8507110a848"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.115637 4768 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-var-lock\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.115668 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e428b7d-5e25-414b-8a31-f8507110a848-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.115680 4768 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e428b7d-5e25-414b-8a31-f8507110a848-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.150757 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="400ms"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.551423 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="800ms"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.623694 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e428b7d-5e25-414b-8a31-f8507110a848","Type":"ContainerDied","Data":"cf7bc84dbbc50532968cb3c6af13f88a293bed34ea4e0c1a66c310438d324926"}
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.624051 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf7bc84dbbc50532968cb3c6af13f88a293bed34ea4e0c1a66c310438d324926"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.624023 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.629255 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.631718 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41" exitCode=0
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.631808 4768 scope.go:117] "RemoveContainer" containerID="fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.631819 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.649802 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.650135 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.651247 4768 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.662241 4768 scope.go:117] "RemoveContainer" containerID="6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.665879 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.666457 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.667081 4768 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.689035 4768 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dc11bade4fe5d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,LastTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.691851 4768 scope.go:117] "RemoveContainer" containerID="170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.709111 4768 scope.go:117] "RemoveContainer" containerID="879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.726257 4768 scope.go:117] "RemoveContainer" containerID="4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.754425 4768 scope.go:117] "RemoveContainer" containerID="dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.774529 4768 scope.go:117] "RemoveContainer" containerID="fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.777565 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\": container with ID starting with fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde not found: ID does not exist" containerID="fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.777611 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde"} err="failed to get container status \"fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\": rpc error: code = NotFound desc = could not find container \"fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde\": container with ID starting with fe1eb288d88cd9880e7cdd76922c63398991eb4fc27ed46dde5f985e7c575cde not found: ID does not exist"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.777660 4768 scope.go:117] "RemoveContainer" containerID="6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.778131 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\": container with ID starting with 6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929 not found: ID does not exist" containerID="6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.778211 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929"} err="failed to get container status \"6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\": rpc error: code = NotFound desc = could not find container \"6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929\": container with ID starting with 6b3622096a060d741e7dddd58b281361e3c0b2c00a6a05d2931178a09745e929 not found: ID does not exist"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.778244 4768 scope.go:117] "RemoveContainer" containerID="170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.778741 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\": container with ID starting with 170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2 not found: ID does not exist" containerID="170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.778792 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2"} err="failed to get container status \"170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\": rpc error: code = NotFound desc = could not find container \"170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2\": container with ID starting with 170b73bb7cd2b529734b32c9b8ae9b3019f29c5f877bc08926f62789c50b6de2 not found: ID does not exist"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.778830 4768 scope.go:117] "RemoveContainer" containerID="879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.779135 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\": container with ID starting with 879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb not found: ID does not exist" containerID="879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.779220 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb"} err="failed to get container status \"879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\": rpc error: code = NotFound desc = could not find container \"879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb\": container with ID starting with 879498d7ece16a5347aebea3dc23d86e1a395b4062a22d681706a2bda5c9a6eb not found: ID does not exist"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.779249 4768 scope.go:117] "RemoveContainer" containerID="4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.780339 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\": container with ID starting with 4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41 not found: ID does not exist" containerID="4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.780396 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41"} err="failed to get container status \"4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\": rpc error: code = NotFound desc = could not find container \"4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41\": container with ID starting with 4ef42687c9c75396b82c16e51198895446e11d4d9c1f7e0309de2c99da101a41 not found: ID does not exist"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.780425 4768 scope.go:117] "RemoveContainer" containerID="dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f"
Dec 03 16:22:36 crc kubenswrapper[4768]: E1203 16:22:36.780757 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\": container with ID starting with dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f not found: ID does not exist" containerID="dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f"
Dec 03 16:22:36 crc kubenswrapper[4768]: I1203 16:22:36.780782 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f"} err="failed to get container status \"dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\": rpc error: code = NotFound desc = could not find container \"dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f\": container with ID starting with dc884f2edc9271d4e36889f69273d6776b43c642c8efea51aac27b485c82720f not found: ID does not exist"
Dec 03 16:22:37 crc kubenswrapper[4768]: E1203 16:22:37.353539 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="1.6s"
Dec 03 16:22:37 crc kubenswrapper[4768]: I1203 16:22:37.543671 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 03 16:22:38 crc kubenswrapper[4768]: E1203 16:22:38.954761 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="3.2s"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.711005 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tpql2"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.711872 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.712434 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.712959 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.754570 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tpql2"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.755534 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.756184 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:41 crc kubenswrapper[4768]: I1203 16:22:41.756726 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:42 crc kubenswrapper[4768]: E1203 16:22:42.156053 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="6.4s"
Dec 03 16:22:42 crc kubenswrapper[4768]: E1203 16:22:42.603936 4768 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" volumeName="registry-storage"
Dec 03 16:22:43 crc kubenswrapper[4768]: I1203 16:22:43.540795 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:43 crc kubenswrapper[4768]: I1203 16:22:43.541504 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:43 crc kubenswrapper[4768]: I1203 16:22:43.542858 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:46 crc kubenswrapper[4768]: E1203 16:22:46.691543 4768 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.38:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187dc11bade4fe5d openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,LastTimestamp:2025-12-03 16:22:33.624845917 +0000 UTC m=+250.544182340,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.458964 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerName="oauth-openshift" containerID="cri-o://0d39dafb7ca9f3046fdff61c85b570b25f69fa4750e91d6763d15e56352111e2" gracePeriod=15
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.531732 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.532956 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.533653 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.534239 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.564873 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.564902 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6"
Dec 03 16:22:47 crc kubenswrapper[4768]: E1203 16:22:47.565320 4768 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.567003 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:22:47 crc kubenswrapper[4768]: W1203 16:22:47.614834 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-eaf6cfe70569dfb53c3c0d1043103e0be40e2b29af0e2bb8d963130c85458c82 WatchSource:0}: Error finding container eaf6cfe70569dfb53c3c0d1043103e0be40e2b29af0e2bb8d963130c85458c82: Status 404 returned error can't find the container with id eaf6cfe70569dfb53c3c0d1043103e0be40e2b29af0e2bb8d963130c85458c82
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.705065 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"eaf6cfe70569dfb53c3c0d1043103e0be40e2b29af0e2bb8d963130c85458c82"}
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.712050 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.712088 4768 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e" exitCode=1
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.712134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e"}
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.712556 4768 scope.go:117] "RemoveContainer" containerID="ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.713137 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.713684 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.713987 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.714137 4768 generic.go:334] "Generic (PLEG): container finished" podID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerID="0d39dafb7ca9f3046fdff61c85b570b25f69fa4750e91d6763d15e56352111e2" exitCode=0
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.714173 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" event={"ID":"2b325ed1-652c-4b16-9f58-04cc416148fd","Type":"ContainerDied","Data":"0d39dafb7ca9f3046fdff61c85b570b25f69fa4750e91d6763d15e56352111e2"}
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.714383 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.817054 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.817505 4768 status_manager.go:851] "Failed to get status for pod" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-29lwk\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.817947 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.818649 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.818981 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.819402 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused"
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872002 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872046 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872069 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872101 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872131 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872155 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872178 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbg2b\" (UniqueName: \"kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872206 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872236 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872259 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872287 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872311 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872341 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872367 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection\") pod \"2b325ed1-652c-4b16-9f58-04cc416148fd\" (UID: \"2b325ed1-652c-4b16-9f58-04cc416148fd\") "
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.872178 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.873111 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.873249 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.873653 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.874060 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.878731 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.879027 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.879194 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.879453 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.879478 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.879584 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.880165 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-system-router-certs".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.880340 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.880930 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b" (OuterVolumeSpecName: "kube-api-access-qbg2b") pod "2b325ed1-652c-4b16-9f58-04cc416148fd" (UID: "2b325ed1-652c-4b16-9f58-04cc416148fd"). InnerVolumeSpecName "kube-api-access-qbg2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973188 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973229 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973243 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbg2b\" (UniqueName: \"kubernetes.io/projected/2b325ed1-652c-4b16-9f58-04cc416148fd-kube-api-access-qbg2b\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973255 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973270 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973283 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973295 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973308 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973319 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" 
(UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973330 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973348 4768 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973360 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973374 4768 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/2b325ed1-652c-4b16-9f58-04cc416148fd-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:47 crc kubenswrapper[4768]: I1203 16:22:47.973387 4768 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/2b325ed1-652c-4b16-9f58-04cc416148fd-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 03 16:22:48 crc kubenswrapper[4768]: E1203 16:22:48.558315 4768 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.38:6443: connect: connection refused" interval="7s" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.724743 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" event={"ID":"2b325ed1-652c-4b16-9f58-04cc416148fd","Type":"ContainerDied","Data":"a858a5ea35b18e1e83fbafc21a3186c346bd28d816ac0552e3e5a66a7350dd84"} Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.724805 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.724830 4768 scope.go:117] "RemoveContainer" containerID="0d39dafb7ca9f3046fdff61c85b570b25f69fa4750e91d6763d15e56352111e2" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.725870 4768 status_manager.go:851] "Failed to get status for pod" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-29lwk\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.726412 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.727156 4768 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="82458fd12e18fadf0a6f66bb729a1454e0f0832cf42ddcf50d773da6cf8c2b67" exitCode=0 Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.727215 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"82458fd12e18fadf0a6f66bb729a1454e0f0832cf42ddcf50d773da6cf8c2b67"} Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.727552 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.727578 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.727708 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: E1203 16:22:48.728044 4768 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.728798 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.729836 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.730266 4768 status_manager.go:851] "Failed to get status for pod" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-29lwk\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.730982 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.731352 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.731776 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.732121 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.735143 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.735213 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"548e0b0fae18ab7c26eee9ec1ba11c4aa1833194a2304a08c9b714838c53763a"} Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.736737 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.737177 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: 
connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.737994 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.738961 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.739516 4768 status_manager.go:851] "Failed to get status for pod" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-29lwk\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.773510 4768 status_manager.go:851] "Failed to get status for pod" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" pod="openshift-authentication/oauth-openshift-558db77b4-29lwk" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-29lwk\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.773926 4768 status_manager.go:851] "Failed to get status for pod" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.774274 4768 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.774654 4768 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:48 crc kubenswrapper[4768]: I1203 16:22:48.775013 4768 status_manager.go:851] "Failed to get status for pod" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" pod="openshift-marketplace/redhat-operators-tpql2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-tpql2\": dial tcp 38.102.83.38:6443: connect: connection refused" Dec 03 16:22:49 crc kubenswrapper[4768]: I1203 16:22:49.748649 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6b48538765a631ea5f211f348bb0a4368f3588a1888e1d92640d66119820b406"} Dec 03 16:22:49 crc kubenswrapper[4768]: I1203 16:22:49.749014 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1dc5c00da72e50586f2dbb9ce1b9efbbfc77ec3d21c6edac36cc219a883710c5"} Dec 03 16:22:49 crc kubenswrapper[4768]: I1203 16:22:49.749029 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1ad33ad1d2ac4a9aa35359a9c77d63ce4b6c96900ee2186cab43e2aaf05dd803"} Dec 03 16:22:50 crc kubenswrapper[4768]: I1203 16:22:50.765139 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"76cf990c6a1e85fa16dd7727102a5cf5dc4ea805dd1d106402187ed936148dc9"} Dec 03 16:22:50 crc kubenswrapper[4768]: I1203 16:22:50.765516 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f568354cf63a211beac43efa2c864f0643f21dc4fc280f96840c5dfadab6e63c"} Dec 03 16:22:50 crc kubenswrapper[4768]: I1203 16:22:50.765539 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:50 crc kubenswrapper[4768]: I1203 16:22:50.765549 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:50 crc kubenswrapper[4768]: I1203 16:22:50.765579 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.367824 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.368133 4768 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.368355 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.567704 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.567865 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:52 crc kubenswrapper[4768]: I1203 16:22:52.574563 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 
16:22:53 crc kubenswrapper[4768]: I1203 16:22:53.865568 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:22:55 crc kubenswrapper[4768]: I1203 16:22:55.791706 4768 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:55 crc kubenswrapper[4768]: I1203 16:22:55.962484 4768 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="5761731e-3ffe-4e32-8d92-cb3ee3c99c16" Dec 03 16:22:56 crc kubenswrapper[4768]: I1203 16:22:56.812076 4768 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="5761731e-3ffe-4e32-8d92-cb3ee3c99c16" Dec 03 16:22:56 crc kubenswrapper[4768]: I1203 16:22:56.812541 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:56 crc kubenswrapper[4768]: I1203 16:22:56.812787 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:56 crc kubenswrapper[4768]: I1203 16:22:56.815867 4768 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://1ad33ad1d2ac4a9aa35359a9c77d63ce4b6c96900ee2186cab43e2aaf05dd803" Dec 03 16:22:56 crc kubenswrapper[4768]: I1203 16:22:56.815916 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 03 16:22:57 crc kubenswrapper[4768]: I1203 16:22:57.809918 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:57 crc kubenswrapper[4768]: I1203 16:22:57.809961 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6" Dec 03 16:22:57 crc kubenswrapper[4768]: I1203 16:22:57.813620 4768 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="5761731e-3ffe-4e32-8d92-cb3ee3c99c16" Dec 03 16:23:02 crc kubenswrapper[4768]: I1203 16:23:02.368213 4768 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 16:23:02 crc kubenswrapper[4768]: I1203 16:23:02.368807 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 16:23:04 crc kubenswrapper[4768]: I1203 16:23:04.705424 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 03 16:23:04 crc kubenswrapper[4768]: I1203 
16:23:04.790305 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.146715 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.290989 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.402791 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.668464 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.781610 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 03 16:23:05 crc kubenswrapper[4768]: I1203 16:23:05.871181 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 03 16:23:06 crc kubenswrapper[4768]: I1203 16:23:06.131033 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 03 16:23:06 crc kubenswrapper[4768]: I1203 16:23:06.822200 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 03 16:23:07 crc kubenswrapper[4768]: I1203 16:23:07.156999 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 03 16:23:07 crc kubenswrapper[4768]: I1203 16:23:07.399087 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 03 16:23:07 crc kubenswrapper[4768]: I1203 16:23:07.701283 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 03 16:23:07 crc kubenswrapper[4768]: I1203 16:23:07.848914 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.100946 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.198798 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.246942 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.355004 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.486070 4768 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.520146 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.638862 4768 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ovn-kubernetes"/"env-overrides" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.718944 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.738790 4768 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.748672 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.752414 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.754990 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.837678 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.838876 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.985085 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 03 16:23:08 crc kubenswrapper[4768]: I1203 16:23:08.992114 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.122264 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.135463 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.155888 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.298094 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.331947 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.346330 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.347401 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.454746 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.521671 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.521943 4768 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.526337 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.595497 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.595655 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.617264 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.618456 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.637344 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.777646 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.848999 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.856551 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.900361 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 03 16:23:09 crc kubenswrapper[4768]: I1203 16:23:09.979315 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.075989 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.138788 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.208842 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.214905 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.238383 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.289693 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.338867 4768 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.399670 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.409153 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.687449 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.742965 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.770171 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.797389 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.872253 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 03 16:23:10 crc kubenswrapper[4768]: I1203 16:23:10.924631 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.041294 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.112815 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.130577 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.145280 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.182301 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.208193 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.218653 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.284960 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.415806 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.515540 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.527189 4768 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-tls" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.589330 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.644765 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.713587 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.733208 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.757264 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.793540 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.801243 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.818549 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.849282 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.891694 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.904616 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.929199 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:23:11 crc kubenswrapper[4768]: I1203 16:23:11.929412 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.022999 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.054686 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.103058 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.113371 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.156644 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.164043 4768 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.180226 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.304977 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.317343 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.322467 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.367923 4768 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.367987 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.368048 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.368736 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"548e0b0fae18ab7c26eee9ec1ba11c4aa1833194a2304a08c9b714838c53763a"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.368865 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://548e0b0fae18ab7c26eee9ec1ba11c4aa1833194a2304a08c9b714838c53763a" gracePeriod=30 Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.475299 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.553005 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.555486 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.579540 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.615645 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 03 16:23:12 crc 
kubenswrapper[4768]: I1203 16:23:12.653113 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.667740 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.752069 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 03 16:23:12 crc kubenswrapper[4768]: I1203 16:23:12.913989 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.076007 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.101880 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.177895 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.329640 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.445861 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.464427 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.465705 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.532571 4768 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.558257 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.587828 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.675701 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.749882 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.755108 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.779277 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.901309 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 03 16:23:13 crc kubenswrapper[4768]: I1203 16:23:13.941177 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.068233 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.072850 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.134090 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.201002 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.261013 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.307984 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.438245 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.521135 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.542343 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.719179 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.797468 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.908237 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.924136 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 03 16:23:14 crc kubenswrapper[4768]: I1203 16:23:14.976189 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.081733 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.088027 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.088472 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.090242 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.094464 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.274751 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.330616 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.332635 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.403653 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.598006 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.608300 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.693700 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.761436 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.809894 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.930282 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.932766 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 03 16:23:15 crc kubenswrapper[4768]: I1203 16:23:15.967024 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.018864 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.038563 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.075436 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.150705 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.245859 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.277946 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.279661 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.285908 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.308351 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.396850 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.419060 4768 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.440349 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.553319 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.576076 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.599589 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.617952 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.672713 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.805464 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.833854 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.864100 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.895677 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.914733 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.915780 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.950985 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 03 16:23:16 crc kubenswrapper[4768]: I1203 16:23:16.989146 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.004185 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.104841 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.111376 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.132317 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.135941 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.148537 4768 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.151725 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=44.151704212 podStartE2EDuration="44.151704212s" podCreationTimestamp="2025-12-03 16:22:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:22:55.917129431 +0000 UTC m=+272.836465864" watchObservedRunningTime="2025-12-03 16:23:17.151704212 +0000 UTC m=+294.071040635"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.153864 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29lwk","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.153919 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-786b6d57dd-vgwlt","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 03 16:23:17 crc kubenswrapper[4768]: E1203 16:23:17.154118 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" containerName="installer"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154132 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" containerName="installer"
Dec 03 16:23:17 crc kubenswrapper[4768]: E1203 16:23:17.154147 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerName="oauth-openshift"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154155 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerName="oauth-openshift"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154360 4768 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154374 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="edc4814f-6b71-488a-8a4b-f76360b9d1b6"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154499 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e428b7d-5e25-414b-8a31-f8507110a848" containerName="installer"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154536 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" containerName="oauth-openshift"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.154748 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.155194 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.159692 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.162989 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.163136 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.163145 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.163331 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.163532 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.165941 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.167440 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.168920 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.168995 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.169190 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.170241 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.173921 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.178868 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.183244 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.193093 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.204622 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.204579801 podStartE2EDuration="22.204579801s" podCreationTimestamp="2025-12-03 16:22:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:23:17.198141343 +0000 UTC m=+294.117477826" watchObservedRunningTime="2025-12-03 16:23:17.204579801 +0000 UTC m=+294.123916224"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.240525 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.288344 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.306878 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-policies\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.307251 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.307473 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.307684 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.307893 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.308082 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.308259 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.308482 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.308701 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.308909 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.309113 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-dir\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.309290 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.309516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzqmh\" (UniqueName: \"kubernetes.io/projected/bf1e74b9-5813-46a6-a6dc-069959af0677-kube-api-access-kzqmh\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.309754 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.340648 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.411378 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzqmh\" (UniqueName: \"kubernetes.io/projected/bf1e74b9-5813-46a6-a6dc-069959af0677-kube-api-access-kzqmh\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.411905 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.412136 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-policies\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.412424 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.412723 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.413016 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.413300 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.413177 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-cliconfig\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.413391 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-policies\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.413568 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.414223 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.414464 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.414781 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.415052 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.415341 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-dir\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.415540 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.415804 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bf1e74b9-5813-46a6-a6dc-069959af0677-audit-dir\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.415894 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-service-ca\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.416228 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.419899 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-router-certs\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.420851 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.421130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.421807 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-login\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.422012 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-serving-cert\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.422322 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-template-error\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.423694 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.425893 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bf1e74b9-5813-46a6-a6dc-069959af0677-v4-0-config-system-session\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.441032 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.441274 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzqmh\" (UniqueName: \"kubernetes.io/projected/bf1e74b9-5813-46a6-a6dc-069959af0677-kube-api-access-kzqmh\") pod \"oauth-openshift-786b6d57dd-vgwlt\" (UID: \"bf1e74b9-5813-46a6-a6dc-069959af0677\") " pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.476386 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.760722 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.833757 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.834941 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.835515 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.835932 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.840747 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.853553 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b325ed1-652c-4b16-9f58-04cc416148fd" path="/var/lib/kubelet/pods/2b325ed1-652c-4b16-9f58-04cc416148fd/volumes"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.878014 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.921754 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.939269 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.968614 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.984955 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 03 16:23:17 crc kubenswrapper[4768]: I1203 16:23:17.986907 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.029370 4768 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.029752 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca" gracePeriod=5
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.136344 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.224329 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.252720 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.253867 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.671503 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.718890 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.749156 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.853178 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.894949 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 03 16:23:18 crc kubenswrapper[4768]: I1203 16:23:18.931882 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.022925 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.163693 4768 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.199981 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.310684 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.423516 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.501098 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.866897 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.927301 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.950253 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 03 16:23:19 crc kubenswrapper[4768]: I1203 16:23:19.991734 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.024029 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"]
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.117087 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.405917 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.429217 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.569254 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.754858 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 03 16:23:20 crc kubenswrapper[4768]: I1203 16:23:20.811317 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.000119 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt" event={"ID":"bf1e74b9-5813-46a6-a6dc-069959af0677","Type":"ContainerStarted","Data":"4aac44d54b8b16b6695d69b364035097de524f0da7dafaadc701fde1c8e6da75"}
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.001695 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.001972 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt" event={"ID":"bf1e74b9-5813-46a6-a6dc-069959af0677","Type":"ContainerStarted","Data":"04c6105de6a2d8cb1d4a88b46fefc0e6f9ec0836b5c310f298a31833e1ff1c83"}
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.007767 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt"
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.040252 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-786b6d57dd-vgwlt" podStartSLOduration=59.040218129 podStartE2EDuration="59.040218129s" podCreationTimestamp="2025-12-03 16:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:23:21.040025683 +0000 UTC m=+297.959362196" watchObservedRunningTime="2025-12-03 16:23:21.040218129 +0000 UTC m=+297.959554592"
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.284450 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 03 16:23:21 crc kubenswrapper[4768]: I1203 16:23:21.315445 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 03 16:23:22 crc kubenswrapper[4768]: I1203 16:23:22.865581 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.594631 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.594966 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717773 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717837 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717889 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717886 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717941 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.717965 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718021 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718131 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718763 4768 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718816 4768 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718843 4768 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.718867 4768 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.735868 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:23:23 crc kubenswrapper[4768]: I1203 16:23:23.820115 4768 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.020144 4768 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca" exitCode=137
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.020226 4768 scope.go:117] "RemoveContainer" containerID="2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca"
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.021552 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.042331 4768 scope.go:117] "RemoveContainer" containerID="2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca"
Dec 03 16:23:24 crc kubenswrapper[4768]: E1203 16:23:24.042869 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca\": container with ID starting with 2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca not found: ID does not exist" containerID="2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca"
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.042924 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca"} err="failed to get container status \"2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca\": rpc error: code = NotFound desc = could not find container \"2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca\": container with ID starting with 2da78b6a2a17b7ac3bcddf9e22942337120eeb4d135a03c02da3db513a5009ca not found: ID does not exist"
Dec 03 16:23:24 crc kubenswrapper[4768]: I1203 16:23:24.071421 4768 kubelet.go:2706] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c14ddb99-f56a-4297-b067-fbdc506262d7"
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.543273 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.543748 4768 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID=""
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.558758 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.558795 4768 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c14ddb99-f56a-4297-b067-fbdc506262d7"
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.565780 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 03 16:23:25 crc kubenswrapper[4768]: I1203 16:23:25.566058 4768 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="c14ddb99-f56a-4297-b067-fbdc506262d7"
Dec 03 16:23:37 crc kubenswrapper[4768]: I1203 16:23:37.235139 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 03 16:23:38 crc kubenswrapper[4768]: I1203 16:23:38.735569 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 03 16:23:41 crc kubenswrapper[4768]: I1203 16:23:41.373449 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.159584 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.162103 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.162178 4768 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="548e0b0fae18ab7c26eee9ec1ba11c4aa1833194a2304a08c9b714838c53763a" exitCode=137
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.162217 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"548e0b0fae18ab7c26eee9ec1ba11c4aa1833194a2304a08c9b714838c53763a"}
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.162257 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"717f873a78263cc23dfeaf631c5fc6040945688822ba8be01c12872704e68716"}
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.162285 4768 scope.go:117] "RemoveContainer" containerID="ac16bfc558ae8f3ad3ac7874bcc05ee3b748f5ea743915496a62b62ea6e1198e"
Dec 03 16:23:43 crc kubenswrapper[4768]: I1203 16:23:43.865731 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:23:44 crc kubenswrapper[4768]: I1203 16:23:44.174529 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Dec 03 16:23:52 crc kubenswrapper[4768]: I1203 16:23:52.367938 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:23:52 crc kubenswrapper[4768]: I1203 16:23:52.376673 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:23:53 crc kubenswrapper[4768]: I1203 16:23:53.238999 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 03 16:23:53 crc kubenswrapper[4768]: I1203 16:23:53.328473 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 03 16:23:56 crc kubenswrapper[4768]: I1203 16:23:56.028644 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:23:56 crc kubenswrapper[4768]: I1203 16:23:56.028728 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:24:01 crc kubenswrapper[4768]: I1203 16:24:01.958832 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"]
Dec 03 16:24:01 crc kubenswrapper[4768]: I1203 16:24:01.959526 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerName="controller-manager" containerID="cri-o://027a6396ba13c80c8deadd71610f55a1fa0a709eca3b06751ccaa29e8223d308" gracePeriod=30
Dec 03 16:24:01 crc kubenswrapper[4768]: I1203 16:24:01.967966 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"]
Dec 03 16:24:01 crc kubenswrapper[4768]: I1203 16:24:01.968168 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerName="route-controller-manager" containerID="cri-o://aaa3aed05b42a80119e6b0b3881738addcd2ea0d6a72d6269e7f138b2730b60c" gracePeriod=30
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.279417 4768 generic.go:334] "Generic (PLEG): container finished" podID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerID="aaa3aed05b42a80119e6b0b3881738addcd2ea0d6a72d6269e7f138b2730b60c" exitCode=0
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.279512 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" event={"ID":"355764ad-6eb2-4f34-a2be-3708f8ecf73b","Type":"ContainerDied","Data":"aaa3aed05b42a80119e6b0b3881738addcd2ea0d6a72d6269e7f138b2730b60c"}
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.283181 4768 generic.go:334] "Generic (PLEG): container finished" podID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerID="027a6396ba13c80c8deadd71610f55a1fa0a709eca3b06751ccaa29e8223d308" exitCode=0
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.283214 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" event={"ID":"7a481797-5e7f-4ce2-b128-ab8062e625cd","Type":"ContainerDied","Data":"027a6396ba13c80c8deadd71610f55a1fa0a709eca3b06751ccaa29e8223d308"}
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.430792 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.511408 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv"
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.545639 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config\") pod \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") "
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.545700 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfsgx\" (UniqueName: \"kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx\") pod \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") "
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.545721 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert\") pod \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") "
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.545770 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca\") pod \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\" (UID: \"355764ad-6eb2-4f34-a2be-3708f8ecf73b\") "
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.546553 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca" (OuterVolumeSpecName: "client-ca") pod "355764ad-6eb2-4f34-a2be-3708f8ecf73b" (UID: "355764ad-6eb2-4f34-a2be-3708f8ecf73b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.546728 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config" (OuterVolumeSpecName: "config") pod "355764ad-6eb2-4f34-a2be-3708f8ecf73b" (UID: "355764ad-6eb2-4f34-a2be-3708f8ecf73b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.552847 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx" (OuterVolumeSpecName: "kube-api-access-cfsgx") pod "355764ad-6eb2-4f34-a2be-3708f8ecf73b" (UID: "355764ad-6eb2-4f34-a2be-3708f8ecf73b"). InnerVolumeSpecName "kube-api-access-cfsgx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.553866 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "355764ad-6eb2-4f34-a2be-3708f8ecf73b" (UID: "355764ad-6eb2-4f34-a2be-3708f8ecf73b"). InnerVolumeSpecName "serving-cert".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647249 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca\") pod \"7a481797-5e7f-4ce2-b128-ab8062e625cd\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647303 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp5gr\" (UniqueName: \"kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr\") pod \"7a481797-5e7f-4ce2-b128-ab8062e625cd\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647354 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles\") pod \"7a481797-5e7f-4ce2-b128-ab8062e625cd\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647452 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config\") pod \"7a481797-5e7f-4ce2-b128-ab8062e625cd\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647497 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert\") pod \"7a481797-5e7f-4ce2-b128-ab8062e625cd\" (UID: \"7a481797-5e7f-4ce2-b128-ab8062e625cd\") " Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647772 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647787 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/355764ad-6eb2-4f34-a2be-3708f8ecf73b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647841 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfsgx\" (UniqueName: \"kubernetes.io/projected/355764ad-6eb2-4f34-a2be-3708f8ecf73b-kube-api-access-cfsgx\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.647857 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/355764ad-6eb2-4f34-a2be-3708f8ecf73b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.648172 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca" (OuterVolumeSpecName: "client-ca") pod "7a481797-5e7f-4ce2-b128-ab8062e625cd" (UID: "7a481797-5e7f-4ce2-b128-ab8062e625cd"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.648312 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7a481797-5e7f-4ce2-b128-ab8062e625cd" (UID: "7a481797-5e7f-4ce2-b128-ab8062e625cd"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.648634 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config" (OuterVolumeSpecName: "config") pod "7a481797-5e7f-4ce2-b128-ab8062e625cd" (UID: "7a481797-5e7f-4ce2-b128-ab8062e625cd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.650952 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr" (OuterVolumeSpecName: "kube-api-access-sp5gr") pod "7a481797-5e7f-4ce2-b128-ab8062e625cd" (UID: "7a481797-5e7f-4ce2-b128-ab8062e625cd"). InnerVolumeSpecName "kube-api-access-sp5gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.651364 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7a481797-5e7f-4ce2-b128-ab8062e625cd" (UID: "7a481797-5e7f-4ce2-b128-ab8062e625cd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.749297 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.749343 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a481797-5e7f-4ce2-b128-ab8062e625cd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.749360 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.749372 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp5gr\" (UniqueName: \"kubernetes.io/projected/7a481797-5e7f-4ce2-b128-ab8062e625cd-kube-api-access-sp5gr\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:02 crc kubenswrapper[4768]: I1203 16:24:02.749385 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7a481797-5e7f-4ce2-b128-ab8062e625cd-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152421 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:03 crc kubenswrapper[4768]: E1203 16:24:03.152689 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerName="route-controller-manager" Dec 03 
16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152704 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerName="route-controller-manager" Dec 03 16:24:03 crc kubenswrapper[4768]: E1203 16:24:03.152726 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerName="controller-manager" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152734 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerName="controller-manager" Dec 03 16:24:03 crc kubenswrapper[4768]: E1203 16:24:03.152746 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152754 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152857 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" containerName="route-controller-manager" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152886 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.152893 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" containerName="controller-manager" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.153268 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.156369 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.157179 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.165015 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.168581 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254387 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254473 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254500 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254527 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254566 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254589 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89ttp\" (UniqueName: \"kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254636 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc662\" (UniqueName: \"kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " 
pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254678 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.254701 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.293370 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.293759 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-dngkv" event={"ID":"7a481797-5e7f-4ce2-b128-ab8062e625cd","Type":"ContainerDied","Data":"24523cf49cfbc1b05b718076c6af30bec41dbc289f992975cce795d3c4289d53"} Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.293870 4768 scope.go:117] "RemoveContainer" containerID="027a6396ba13c80c8deadd71610f55a1fa0a709eca3b06751ccaa29e8223d308" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.295985 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" event={"ID":"355764ad-6eb2-4f34-a2be-3708f8ecf73b","Type":"ContainerDied","Data":"617fc88f1a023f3fb5fca8ae1f8402dda81594d8aa852a1314222b3d3159a682"} Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.296042 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.320812 4768 scope.go:117] "RemoveContainer" containerID="aaa3aed05b42a80119e6b0b3881738addcd2ea0d6a72d6269e7f138b2730b60c" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.334455 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.342660 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-mkjqb"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.348414 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.352476 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-dngkv"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.356467 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.356710 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.356917 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357155 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357279 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357398 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " 
pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357534 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357661 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89ttp\" (UniqueName: \"kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.357786 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc662\" (UniqueName: \"kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.358236 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.358257 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.358266 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.358833 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.358921 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.363613 4768 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.367674 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.373041 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89ttp\" (UniqueName: \"kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp\") pod \"route-controller-manager-6c5dd59bc9-rf9nb\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.378042 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc662\" (UniqueName: \"kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662\") pod \"controller-manager-5c686f9869-h88g6\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.480457 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.491852 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.545136 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="355764ad-6eb2-4f34-a2be-3708f8ecf73b" path="/var/lib/kubelet/pods/355764ad-6eb2-4f34-a2be-3708f8ecf73b/volumes" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.546620 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a481797-5e7f-4ce2-b128-ab8062e625cd" path="/var/lib/kubelet/pods/7a481797-5e7f-4ce2-b128-ab8062e625cd/volumes" Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.711589 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:03 crc kubenswrapper[4768]: I1203 16:24:03.959769 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:03 crc kubenswrapper[4768]: W1203 16:24:03.965046 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13080004_259f_432b_b14a_664d5d0318fa.slice/crio-6dc30973dc3a5bb3e502c81627c1ca0996d28a64dc697156f053324138832848 WatchSource:0}: Error finding container 6dc30973dc3a5bb3e502c81627c1ca0996d28a64dc697156f053324138832848: Status 404 returned error can't find the container with id 6dc30973dc3a5bb3e502c81627c1ca0996d28a64dc697156f053324138832848 Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.303668 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" event={"ID":"4e6cf345-5855-403f-98f2-f242e2a4c5c1","Type":"ContainerStarted","Data":"1bbc8fc5c42750f9bfb55ffef09beb13932d041145c06c831c8eba7744882295"} Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.304669 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" event={"ID":"4e6cf345-5855-403f-98f2-f242e2a4c5c1","Type":"ContainerStarted","Data":"8aa54e57e58d2aa1bbef664489715340c0b44a3c3d9eb5125b437e07d8a27a15"} Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.304776 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.305256 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" event={"ID":"13080004-259f-432b-b14a-664d5d0318fa","Type":"ContainerStarted","Data":"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334"} Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.305336 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" event={"ID":"13080004-259f-432b-b14a-664d5d0318fa","Type":"ContainerStarted","Data":"6dc30973dc3a5bb3e502c81627c1ca0996d28a64dc697156f053324138832848"} Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.305850 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.315931 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" 
Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.325028 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" podStartSLOduration=3.325010378 podStartE2EDuration="3.325010378s" podCreationTimestamp="2025-12-03 16:24:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:04.322018593 +0000 UTC m=+341.241355016" watchObservedRunningTime="2025-12-03 16:24:04.325010378 +0000 UTC m=+341.244346801" Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.364112 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:04 crc kubenswrapper[4768]: I1203 16:24:04.401481 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" podStartSLOduration=2.401460546 podStartE2EDuration="2.401460546s" podCreationTimestamp="2025-12-03 16:24:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:04.36929798 +0000 UTC m=+341.288634413" watchObservedRunningTime="2025-12-03 16:24:04.401460546 +0000 UTC m=+341.320796969" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.278001 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.280556 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nt5hc" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="registry-server" containerID="cri-o://93ea4a973604af99409a6a1b3961fa3c530d8f98692f161460f07a1a72f19a18" gracePeriod=30 Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.285705 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.286168 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dnd7v" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="registry-server" containerID="cri-o://7d06d05bc1476a976b76c7d881f0d610df8c44db84471be2cc31bf707dac9319" gracePeriod=30 Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.317961 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.318201 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" containerID="cri-o://a7270c904125c21a4ac078512b8e8ad09de5b5c23e14202714b619424edb0548" gracePeriod=30 Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.335197 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.335755 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-5xt59" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="registry-server" 
containerID="cri-o://080e86850cc79c429b494e285e4c7f940a36b0f0480e020fe8456ec33f04db2a" gracePeriod=30 Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.339457 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.339922 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tpql2" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="registry-server" containerID="cri-o://27dd9452f96f6c2b902ebd487915dc2da7b81bb82a507051b688871cd90a25b9" gracePeriod=30 Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.341739 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9jljw"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.342479 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.354660 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9jljw"] Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.491976 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.492045 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmcqd\" (UniqueName: \"kubernetes.io/projected/60d2c487-bb7a-43ee-a699-906a81e5627d-kube-api-access-rmcqd\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.492102 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.594943 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.596514 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.596548 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rmcqd\" (UniqueName: \"kubernetes.io/projected/60d2c487-bb7a-43ee-a699-906a81e5627d-kube-api-access-rmcqd\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.598201 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.615719 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/60d2c487-bb7a-43ee-a699-906a81e5627d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.616005 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmcqd\" (UniqueName: \"kubernetes.io/projected/60d2c487-bb7a-43ee-a699-906a81e5627d-kube-api-access-rmcqd\") pod \"marketplace-operator-79b997595-9jljw\" (UID: \"60d2c487-bb7a-43ee-a699-906a81e5627d\") " pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:12 crc kubenswrapper[4768]: I1203 16:24:12.670770 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.153642 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9jljw"] Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.384420 4768 generic.go:334] "Generic (PLEG): container finished" podID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerID="a7270c904125c21a4ac078512b8e8ad09de5b5c23e14202714b619424edb0548" exitCode=0 Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.384538 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" event={"ID":"f296dd0a-7f14-44f1-bd50-368fd1a9f430","Type":"ContainerDied","Data":"a7270c904125c21a4ac078512b8e8ad09de5b5c23e14202714b619424edb0548"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.388420 4768 generic.go:334] "Generic (PLEG): container finished" podID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerID="93ea4a973604af99409a6a1b3961fa3c530d8f98692f161460f07a1a72f19a18" exitCode=0 Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.388478 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerDied","Data":"93ea4a973604af99409a6a1b3961fa3c530d8f98692f161460f07a1a72f19a18"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.391250 4768 generic.go:334] "Generic (PLEG): container finished" podID="f033c93d-ce31-465a-8466-049ff04809ff" containerID="080e86850cc79c429b494e285e4c7f940a36b0f0480e020fe8456ec33f04db2a" exitCode=0 Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.391360 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerDied","Data":"080e86850cc79c429b494e285e4c7f940a36b0f0480e020fe8456ec33f04db2a"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.392606 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" event={"ID":"60d2c487-bb7a-43ee-a699-906a81e5627d","Type":"ContainerStarted","Data":"6ab25bf5a53292dbe2a6f5166eea9e3ccb0081cee61f4897a89427ae6bd056c9"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.394835 4768 generic.go:334] "Generic (PLEG): container finished" podID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerID="7d06d05bc1476a976b76c7d881f0d610df8c44db84471be2cc31bf707dac9319" exitCode=0 Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.394892 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerDied","Data":"7d06d05bc1476a976b76c7d881f0d610df8c44db84471be2cc31bf707dac9319"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.396824 4768 generic.go:334] "Generic (PLEG): container finished" podID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerID="27dd9452f96f6c2b902ebd487915dc2da7b81bb82a507051b688871cd90a25b9" exitCode=0 Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.396882 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerDied","Data":"27dd9452f96f6c2b902ebd487915dc2da7b81bb82a507051b688871cd90a25b9"} Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.438113 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.613428 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content\") pod \"107b8214-a1ff-4bee-96ea-4e3e9c176635\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.613518 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8qsb\" (UniqueName: \"kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb\") pod \"107b8214-a1ff-4bee-96ea-4e3e9c176635\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.613672 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities\") pod \"107b8214-a1ff-4bee-96ea-4e3e9c176635\" (UID: \"107b8214-a1ff-4bee-96ea-4e3e9c176635\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.617106 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities" (OuterVolumeSpecName: "utilities") pod "107b8214-a1ff-4bee-96ea-4e3e9c176635" (UID: "107b8214-a1ff-4bee-96ea-4e3e9c176635"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.629861 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb" (OuterVolumeSpecName: "kube-api-access-m8qsb") pod "107b8214-a1ff-4bee-96ea-4e3e9c176635" (UID: "107b8214-a1ff-4bee-96ea-4e3e9c176635"). InnerVolumeSpecName "kube-api-access-m8qsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.677501 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.686033 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.694504 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "107b8214-a1ff-4bee-96ea-4e3e9c176635" (UID: "107b8214-a1ff-4bee-96ea-4e3e9c176635"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.715160 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.715200 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/107b8214-a1ff-4bee-96ea-4e3e9c176635-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.715211 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8qsb\" (UniqueName: \"kubernetes.io/projected/107b8214-a1ff-4bee-96ea-4e3e9c176635-kube-api-access-m8qsb\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.739467 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.767505 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816118 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca\") pod \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816156 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rvfq\" (UniqueName: \"kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq\") pod \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816179 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities\") pod \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816217 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6jhn\" (UniqueName: \"kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn\") pod \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816309 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics\") pod \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\" (UID: \"f296dd0a-7f14-44f1-bd50-368fd1a9f430\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.816333 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content\") pod \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\" (UID: \"b5ac3fee-d987-40bd-afd8-c85d8c0311e7\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.817023 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f296dd0a-7f14-44f1-bd50-368fd1a9f430" (UID: "f296dd0a-7f14-44f1-bd50-368fd1a9f430"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.817041 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities" (OuterVolumeSpecName: "utilities") pod "b5ac3fee-d987-40bd-afd8-c85d8c0311e7" (UID: "b5ac3fee-d987-40bd-afd8-c85d8c0311e7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.819143 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn" (OuterVolumeSpecName: "kube-api-access-k6jhn") pod "f296dd0a-7f14-44f1-bd50-368fd1a9f430" (UID: "f296dd0a-7f14-44f1-bd50-368fd1a9f430"). 
InnerVolumeSpecName "kube-api-access-k6jhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.819216 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f296dd0a-7f14-44f1-bd50-368fd1a9f430" (UID: "f296dd0a-7f14-44f1-bd50-368fd1a9f430"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.821612 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq" (OuterVolumeSpecName: "kube-api-access-2rvfq") pod "b5ac3fee-d987-40bd-afd8-c85d8c0311e7" (UID: "b5ac3fee-d987-40bd-afd8-c85d8c0311e7"). InnerVolumeSpecName "kube-api-access-2rvfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917484 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content\") pod \"f033c93d-ce31-465a-8466-049ff04809ff\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917530 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content\") pod \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917553 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gstdj\" (UniqueName: \"kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj\") pod \"f033c93d-ce31-465a-8466-049ff04809ff\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917586 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities\") pod \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917639 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfzqt\" (UniqueName: \"kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt\") pod \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\" (UID: \"4de573e8-81fb-425f-ac06-4eed5a1f8e78\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917663 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities\") pod \"f033c93d-ce31-465a-8466-049ff04809ff\" (UID: \"f033c93d-ce31-465a-8466-049ff04809ff\") " Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917856 4768 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917866 4768 
reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f296dd0a-7f14-44f1-bd50-368fd1a9f430-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917877 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rvfq\" (UniqueName: \"kubernetes.io/projected/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-kube-api-access-2rvfq\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917889 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.917896 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6jhn\" (UniqueName: \"kubernetes.io/projected/f296dd0a-7f14-44f1-bd50-368fd1a9f430-kube-api-access-k6jhn\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.918736 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities" (OuterVolumeSpecName: "utilities") pod "4de573e8-81fb-425f-ac06-4eed5a1f8e78" (UID: "4de573e8-81fb-425f-ac06-4eed5a1f8e78"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.919292 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities" (OuterVolumeSpecName: "utilities") pod "f033c93d-ce31-465a-8466-049ff04809ff" (UID: "f033c93d-ce31-465a-8466-049ff04809ff"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.921037 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt" (OuterVolumeSpecName: "kube-api-access-hfzqt") pod "4de573e8-81fb-425f-ac06-4eed5a1f8e78" (UID: "4de573e8-81fb-425f-ac06-4eed5a1f8e78"). InnerVolumeSpecName "kube-api-access-hfzqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.921832 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj" (OuterVolumeSpecName: "kube-api-access-gstdj") pod "f033c93d-ce31-465a-8466-049ff04809ff" (UID: "f033c93d-ce31-465a-8466-049ff04809ff"). InnerVolumeSpecName "kube-api-access-gstdj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.930888 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5ac3fee-d987-40bd-afd8-c85d8c0311e7" (UID: "b5ac3fee-d987-40bd-afd8-c85d8c0311e7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.960725 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f033c93d-ce31-465a-8466-049ff04809ff" (UID: "f033c93d-ce31-465a-8466-049ff04809ff"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:13 crc kubenswrapper[4768]: I1203 16:24:13.968518 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4de573e8-81fb-425f-ac06-4eed5a1f8e78" (UID: "4de573e8-81fb-425f-ac06-4eed5a1f8e78"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018784 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5ac3fee-d987-40bd-afd8-c85d8c0311e7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018844 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018863 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018883 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gstdj\" (UniqueName: \"kubernetes.io/projected/f033c93d-ce31-465a-8466-049ff04809ff-kube-api-access-gstdj\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018904 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4de573e8-81fb-425f-ac06-4eed5a1f8e78-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018923 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfzqt\" (UniqueName: \"kubernetes.io/projected/4de573e8-81fb-425f-ac06-4eed5a1f8e78-kube-api-access-hfzqt\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.018939 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f033c93d-ce31-465a-8466-049ff04809ff-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.415296 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nt5hc" event={"ID":"107b8214-a1ff-4bee-96ea-4e3e9c176635","Type":"ContainerDied","Data":"0e29314526e8ed160c084bd393cffdf76a295a4057bd55eba443db5f4df366f7"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.415371 4768 scope.go:117] "RemoveContainer" containerID="93ea4a973604af99409a6a1b3961fa3c530d8f98692f161460f07a1a72f19a18" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.415540 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nt5hc" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.421413 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" event={"ID":"60d2c487-bb7a-43ee-a699-906a81e5627d","Type":"ContainerStarted","Data":"44cbd632e369b87315d302d4e8ced6c57062b12b3c906514a67f16f9d4df967e"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.421651 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.430050 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.432478 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dnd7v" event={"ID":"4de573e8-81fb-425f-ac06-4eed5a1f8e78","Type":"ContainerDied","Data":"698c583040d03b1c7c0f26a5da136e1b3c26f758eba5aa1e4eedb038e9c949e4"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.432687 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dnd7v" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.437739 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tpql2" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.437793 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tpql2" event={"ID":"b5ac3fee-d987-40bd-afd8-c85d8c0311e7","Type":"ContainerDied","Data":"92608616385aebe48ae747bbfc488c78932d98b6a742c8a9c8283ef2007ad1b8"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.441352 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.441378 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9zxmd" event={"ID":"f296dd0a-7f14-44f1-bd50-368fd1a9f430","Type":"ContainerDied","Data":"9b5d00c498e3cfef2e1259e89bad9d7a15cfb81150c56aac223aafa8e8ff5e96"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.463240 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-5xt59" event={"ID":"f033c93d-ce31-465a-8466-049ff04809ff","Type":"ContainerDied","Data":"868677f54aef8258632743b390e6b76857f15078a1938a4978b801f82af37696"} Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.464536 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-5xt59" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.474805 4768 scope.go:117] "RemoveContainer" containerID="ebdc38559f2abfadd65b1e05bf3bcd968840ed766c9b80b4e58c88f6ffbbd102" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.476983 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-9jljw" podStartSLOduration=2.476954559 podStartE2EDuration="2.476954559s" podCreationTimestamp="2025-12-03 16:24:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:14.447530261 +0000 UTC m=+351.366866714" watchObservedRunningTime="2025-12-03 16:24:14.476954559 +0000 UTC m=+351.396291012" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.520308 4768 scope.go:117] "RemoveContainer" containerID="f8172614e9d04285defb75f21040784ae76da1053f438c95e8188d2c5ece0e76" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.543734 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.544110 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nt5hc"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.575835 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.580411 4768 scope.go:117] "RemoveContainer" containerID="7d06d05bc1476a976b76c7d881f0d610df8c44db84471be2cc31bf707dac9319" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.584454 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tpql2"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.589946 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.594397 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9zxmd"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.608020 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.617276 4768 scope.go:117] "RemoveContainer" containerID="beb8d82e5b5a854ed9d9ee12971d9ae336123c48eca7c79557c1b0ac9d637d5d" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.617401 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dnd7v"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.620352 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.623759 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-5xt59"] Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.631991 4768 scope.go:117] "RemoveContainer" containerID="f6fc37b13102abc589d535c53e590a8462a2343cbe92ddf4e61ea889013db9c3" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.656533 4768 scope.go:117] "RemoveContainer" containerID="27dd9452f96f6c2b902ebd487915dc2da7b81bb82a507051b688871cd90a25b9" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.678226 4768 
scope.go:117] "RemoveContainer" containerID="825e3e5d06919255cbc3639790ba2655bdef4689cd4643da1ca0c653c11dbc43" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.706823 4768 scope.go:117] "RemoveContainer" containerID="9518b8d02f852c0919daa22a13b80e8c601fd17867c2e708c0c08a9c849e9de7" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.728044 4768 scope.go:117] "RemoveContainer" containerID="a7270c904125c21a4ac078512b8e8ad09de5b5c23e14202714b619424edb0548" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.745117 4768 scope.go:117] "RemoveContainer" containerID="080e86850cc79c429b494e285e4c7f940a36b0f0480e020fe8456ec33f04db2a" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.761177 4768 scope.go:117] "RemoveContainer" containerID="d171a94a51f66f1ec537c5fa7cecff2b56619ea9800974d2d986c6be5b09c2f4" Dec 03 16:24:14 crc kubenswrapper[4768]: I1203 16:24:14.777638 4768 scope.go:117] "RemoveContainer" containerID="b3f6104c93d94461c95761f436c7ba89aa21c4dde84d5fbe8006bdddab8009c9" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.540569 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" path="/var/lib/kubelet/pods/107b8214-a1ff-4bee-96ea-4e3e9c176635/volumes" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.543549 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" path="/var/lib/kubelet/pods/4de573e8-81fb-425f-ac06-4eed5a1f8e78/volumes" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.544960 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" path="/var/lib/kubelet/pods/b5ac3fee-d987-40bd-afd8-c85d8c0311e7/volumes" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.546462 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f033c93d-ce31-465a-8466-049ff04809ff" path="/var/lib/kubelet/pods/f033c93d-ce31-465a-8466-049ff04809ff/volumes" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.549651 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" path="/var/lib/kubelet/pods/f296dd0a-7f14-44f1-bd50-368fd1a9f430/volumes" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.713336 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pwrl8"] Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.714545 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.714757 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.714898 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.715022 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.715139 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.715259 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.715383 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.715503 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.715644 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.715760 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.715891 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.716014 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.716131 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.716239 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.716363 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.716484 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.716626 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.716748 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.716866 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.716976 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="extract-content" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.717114 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.717251 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="extract-utilities" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.717416 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.717734 4768 
state_mem.go:107] "Deleted CPUSet assignment" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: E1203 16:24:15.717918 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718084 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718408 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f033c93d-ce31-465a-8466-049ff04809ff" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718562 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="107b8214-a1ff-4bee-96ea-4e3e9c176635" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718744 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f296dd0a-7f14-44f1-bd50-368fd1a9f430" containerName="marketplace-operator" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718878 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4de573e8-81fb-425f-ac06-4eed5a1f8e78" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.718999 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5ac3fee-d987-40bd-afd8-c85d8c0311e7" containerName="registry-server" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.721018 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.724467 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.724692 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwrl8"] Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.748306 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-utilities\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.748345 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-catalog-content\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.748385 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zznxx\" (UniqueName: \"kubernetes.io/projected/737e36ee-70f0-4076-af72-83d09a86268b-kube-api-access-zznxx\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.849694 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-utilities\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.849753 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-catalog-content\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.849832 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zznxx\" (UniqueName: \"kubernetes.io/projected/737e36ee-70f0-4076-af72-83d09a86268b-kube-api-access-zznxx\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.850900 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-utilities\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.851311 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/737e36ee-70f0-4076-af72-83d09a86268b-catalog-content\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:15 crc kubenswrapper[4768]: I1203 16:24:15.875804 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zznxx\" (UniqueName: \"kubernetes.io/projected/737e36ee-70f0-4076-af72-83d09a86268b-kube-api-access-zznxx\") pod \"redhat-marketplace-pwrl8\" (UID: \"737e36ee-70f0-4076-af72-83d09a86268b\") " pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:16 crc kubenswrapper[4768]: I1203 16:24:16.058571 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:16 crc kubenswrapper[4768]: I1203 16:24:16.502539 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwrl8"] Dec 03 16:24:16 crc kubenswrapper[4768]: W1203 16:24:16.509279 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod737e36ee_70f0_4076_af72_83d09a86268b.slice/crio-9b8b98a7a74ac6f8d3d831c91417f73114d6dff24fb9a655cad88e1c7eccfaab WatchSource:0}: Error finding container 9b8b98a7a74ac6f8d3d831c91417f73114d6dff24fb9a655cad88e1c7eccfaab: Status 404 returned error can't find the container with id 9b8b98a7a74ac6f8d3d831c91417f73114d6dff24fb9a655cad88e1c7eccfaab Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.109319 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q4gkq"] Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.111094 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.119007 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.119183 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q4gkq"] Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.266196 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-utilities\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.266819 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-catalog-content\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.266882 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rlk9\" (UniqueName: \"kubernetes.io/projected/e1e80c42-cfda-453e-8634-a8e2ad23991f-kube-api-access-6rlk9\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.367568 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-catalog-content\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.368696 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rlk9\" (UniqueName: \"kubernetes.io/projected/e1e80c42-cfda-453e-8634-a8e2ad23991f-kube-api-access-6rlk9\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.368646 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-catalog-content\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.368790 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-utilities\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.369081 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e1e80c42-cfda-453e-8634-a8e2ad23991f-utilities\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " 
pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.394950 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rlk9\" (UniqueName: \"kubernetes.io/projected/e1e80c42-cfda-453e-8634-a8e2ad23991f-kube-api-access-6rlk9\") pod \"redhat-operators-q4gkq\" (UID: \"e1e80c42-cfda-453e-8634-a8e2ad23991f\") " pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.437724 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.501078 4768 generic.go:334] "Generic (PLEG): container finished" podID="737e36ee-70f0-4076-af72-83d09a86268b" containerID="76b919f43aa041fc35f3afb6eb11e342459cd52d66e27e356eb78c9dac411e4f" exitCode=0 Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.501328 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwrl8" event={"ID":"737e36ee-70f0-4076-af72-83d09a86268b","Type":"ContainerDied","Data":"76b919f43aa041fc35f3afb6eb11e342459cd52d66e27e356eb78c9dac411e4f"} Dec 03 16:24:17 crc kubenswrapper[4768]: I1203 16:24:17.501431 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwrl8" event={"ID":"737e36ee-70f0-4076-af72-83d09a86268b","Type":"ContainerStarted","Data":"9b8b98a7a74ac6f8d3d831c91417f73114d6dff24fb9a655cad88e1c7eccfaab"} Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.108827 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-z8vgf"] Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.109897 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.113801 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.131304 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8vgf"] Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.281762 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cpft\" (UniqueName: \"kubernetes.io/projected/f9578af8-3d78-4487-a6e0-57d79ebe218e-kube-api-access-8cpft\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.282040 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-utilities\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.282065 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-catalog-content\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.383706 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cpft\" (UniqueName: \"kubernetes.io/projected/f9578af8-3d78-4487-a6e0-57d79ebe218e-kube-api-access-8cpft\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.383870 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-utilities\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.383963 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-catalog-content\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.384613 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-catalog-content\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.384585 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9578af8-3d78-4487-a6e0-57d79ebe218e-utilities\") pod \"certified-operators-z8vgf\" (UID: 
\"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.390574 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q4gkq"] Dec 03 16:24:18 crc kubenswrapper[4768]: W1203 16:24:18.398515 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1e80c42_cfda_453e_8634_a8e2ad23991f.slice/crio-bf923edafffade8cb4e9e8bffa842a4110755a98216e602549b5baac67bef11d WatchSource:0}: Error finding container bf923edafffade8cb4e9e8bffa842a4110755a98216e602549b5baac67bef11d: Status 404 returned error can't find the container with id bf923edafffade8cb4e9e8bffa842a4110755a98216e602549b5baac67bef11d Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.412710 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cpft\" (UniqueName: \"kubernetes.io/projected/f9578af8-3d78-4487-a6e0-57d79ebe218e-kube-api-access-8cpft\") pod \"certified-operators-z8vgf\" (UID: \"f9578af8-3d78-4487-a6e0-57d79ebe218e\") " pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.431454 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.517991 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q4gkq" event={"ID":"e1e80c42-cfda-453e-8634-a8e2ad23991f","Type":"ContainerStarted","Data":"bf923edafffade8cb4e9e8bffa842a4110755a98216e602549b5baac67bef11d"} Dec 03 16:24:18 crc kubenswrapper[4768]: I1203 16:24:18.853659 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-z8vgf"] Dec 03 16:24:18 crc kubenswrapper[4768]: W1203 16:24:18.857491 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9578af8_3d78_4487_a6e0_57d79ebe218e.slice/crio-ef26dadffaa1c886770d07c533b0b05a73c8c5186f69862ef061b9742cb84cba WatchSource:0}: Error finding container ef26dadffaa1c886770d07c533b0b05a73c8c5186f69862ef061b9742cb84cba: Status 404 returned error can't find the container with id ef26dadffaa1c886770d07c533b0b05a73c8c5186f69862ef061b9742cb84cba Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.504781 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.507205 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.509110 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.516697 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.526452 4768 generic.go:334] "Generic (PLEG): container finished" podID="e1e80c42-cfda-453e-8634-a8e2ad23991f" containerID="7715db134eab7be49b1ba0317e36339c78e70335a5b92776d839d3e8969a6d0f" exitCode=0 Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.526505 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q4gkq" event={"ID":"e1e80c42-cfda-453e-8634-a8e2ad23991f","Type":"ContainerDied","Data":"7715db134eab7be49b1ba0317e36339c78e70335a5b92776d839d3e8969a6d0f"} Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.537394 4768 generic.go:334] "Generic (PLEG): container finished" podID="f9578af8-3d78-4487-a6e0-57d79ebe218e" containerID="f1a0a0258d9378482ad0105bf06b3a766e5597138f0b44d131a56c5fbd6aa87b" exitCode=0 Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.540913 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8vgf" event={"ID":"f9578af8-3d78-4487-a6e0-57d79ebe218e","Type":"ContainerDied","Data":"f1a0a0258d9378482ad0105bf06b3a766e5597138f0b44d131a56c5fbd6aa87b"} Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.540960 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8vgf" event={"ID":"f9578af8-3d78-4487-a6e0-57d79ebe218e","Type":"ContainerStarted","Data":"ef26dadffaa1c886770d07c533b0b05a73c8c5186f69862ef061b9742cb84cba"} Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.700257 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn5xw\" (UniqueName: \"kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.700636 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.700667 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.801288 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn5xw\" (UniqueName: \"kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " 
pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.801342 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.801367 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.802009 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.802063 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.824792 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn5xw\" (UniqueName: \"kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw\") pod \"community-operators-45zmq\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:19 crc kubenswrapper[4768]: I1203 16:24:19.859735 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.278166 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 16:24:20 crc kubenswrapper[4768]: W1203 16:24:20.282850 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4 WatchSource:0}: Error finding container 07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4: Status 404 returned error can't find the container with id 07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4 Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.546299 4768 generic.go:334] "Generic (PLEG): container finished" podID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerID="3f2f9cb3ed310677e8e86e5b23a1bea03fdd9ead37120353688e68cadbb40310" exitCode=0 Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.546354 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerDied","Data":"3f2f9cb3ed310677e8e86e5b23a1bea03fdd9ead37120353688e68cadbb40310"} Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.548498 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerStarted","Data":"07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4"} Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.552991 4768 generic.go:334] "Generic (PLEG): container finished" podID="f9578af8-3d78-4487-a6e0-57d79ebe218e" containerID="be71055ba231cb5a26d0fb08bfe0c52c68989f84752f8be5650041eab527d579" exitCode=0 Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.553041 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8vgf" event={"ID":"f9578af8-3d78-4487-a6e0-57d79ebe218e","Type":"ContainerDied","Data":"be71055ba231cb5a26d0fb08bfe0c52c68989f84752f8be5650041eab527d579"} Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.556017 4768 generic.go:334] "Generic (PLEG): container finished" podID="737e36ee-70f0-4076-af72-83d09a86268b" containerID="152e56d8e0b32f3102de91aca936112df1f8dd4e0e69ddd13c2d499690c27106" exitCode=0 Dec 03 16:24:20 crc kubenswrapper[4768]: I1203 16:24:20.556137 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwrl8" event={"ID":"737e36ee-70f0-4076-af72-83d09a86268b","Type":"ContainerDied","Data":"152e56d8e0b32f3102de91aca936112df1f8dd4e0e69ddd13c2d499690c27106"} Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.564391 4768 generic.go:334] "Generic (PLEG): container finished" podID="e1e80c42-cfda-453e-8634-a8e2ad23991f" containerID="8cd178b93b27dd3b402571ea88f15a2584e4acb179924dd0ab68ba5e72ed7668" exitCode=0 Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.564501 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q4gkq" event={"ID":"e1e80c42-cfda-453e-8634-a8e2ad23991f","Type":"ContainerDied","Data":"8cd178b93b27dd3b402571ea88f15a2584e4acb179924dd0ab68ba5e72ed7668"} Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.569282 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerStarted","Data":"df6fee5843168efc4b483fbd4eefb39f99a2a304d942c1bcc6a27f80a30e1f4b"} Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.578342 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-z8vgf" event={"ID":"f9578af8-3d78-4487-a6e0-57d79ebe218e","Type":"ContainerStarted","Data":"eea5a22bd9fc6cf1f9589909839d2caaa751459126425ad024803049d0dc216e"} Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.583421 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwrl8" event={"ID":"737e36ee-70f0-4076-af72-83d09a86268b","Type":"ContainerStarted","Data":"dfa5c027876ac1c28e507487c5951f91692f2ccc9236b7e9a04262e6bf4d6b3e"} Dec 03 16:24:21 crc kubenswrapper[4768]: I1203 16:24:21.626041 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-z8vgf" podStartSLOduration=2.198226687 podStartE2EDuration="3.626020589s" podCreationTimestamp="2025-12-03 16:24:18 +0000 UTC" firstStartedPulling="2025-12-03 16:24:19.538974912 +0000 UTC m=+356.458311335" lastFinishedPulling="2025-12-03 16:24:20.966768804 +0000 UTC m=+357.886105237" observedRunningTime="2025-12-03 16:24:21.620908043 +0000 UTC m=+358.540244476" watchObservedRunningTime="2025-12-03 16:24:21.626020589 +0000 UTC m=+358.545357012" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.012125 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pwrl8" podStartSLOduration=3.578504306 podStartE2EDuration="7.01211077s" podCreationTimestamp="2025-12-03 16:24:15 +0000 UTC" firstStartedPulling="2025-12-03 16:24:17.503156335 +0000 UTC m=+354.422492758" lastFinishedPulling="2025-12-03 16:24:20.936762809 +0000 UTC m=+357.856099222" observedRunningTime="2025-12-03 16:24:21.643869147 +0000 UTC m=+358.563205580" watchObservedRunningTime="2025-12-03 16:24:22.01211077 +0000 UTC m=+358.931447183" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.012647 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.012882 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" podUID="13080004-259f-432b-b14a-664d5d0318fa" containerName="route-controller-manager" containerID="cri-o://267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334" gracePeriod=30 Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.522357 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.590777 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q4gkq" event={"ID":"e1e80c42-cfda-453e-8634-a8e2ad23991f","Type":"ContainerStarted","Data":"b286c0e4062ed6a21e29f0c4cb31c3379d2f6bdbbd1274ad9a0cc7a688c21293"} Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.592188 4768 generic.go:334] "Generic (PLEG): container finished" podID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerID="df6fee5843168efc4b483fbd4eefb39f99a2a304d942c1bcc6a27f80a30e1f4b" exitCode=0 Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.592236 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerDied","Data":"df6fee5843168efc4b483fbd4eefb39f99a2a304d942c1bcc6a27f80a30e1f4b"} Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.593580 4768 generic.go:334] "Generic (PLEG): container finished" podID="13080004-259f-432b-b14a-664d5d0318fa" containerID="267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334" exitCode=0 Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.593630 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" event={"ID":"13080004-259f-432b-b14a-664d5d0318fa","Type":"ContainerDied","Data":"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334"} Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.593672 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" event={"ID":"13080004-259f-432b-b14a-664d5d0318fa","Type":"ContainerDied","Data":"6dc30973dc3a5bb3e502c81627c1ca0996d28a64dc697156f053324138832848"} Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.593689 4768 scope.go:117] "RemoveContainer" containerID="267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.593643 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.607740 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q4gkq" podStartSLOduration=3.203354392 podStartE2EDuration="5.60772615s" podCreationTimestamp="2025-12-03 16:24:17 +0000 UTC" firstStartedPulling="2025-12-03 16:24:19.528552035 +0000 UTC m=+356.447888458" lastFinishedPulling="2025-12-03 16:24:21.932923793 +0000 UTC m=+358.852260216" observedRunningTime="2025-12-03 16:24:22.605645331 +0000 UTC m=+359.524981754" watchObservedRunningTime="2025-12-03 16:24:22.60772615 +0000 UTC m=+359.527062573" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.625750 4768 scope.go:117] "RemoveContainer" containerID="267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334" Dec 03 16:24:22 crc kubenswrapper[4768]: E1203 16:24:22.626191 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334\": container with ID starting with 267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334 not found: ID does not exist" containerID="267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.626236 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334"} err="failed to get container status \"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334\": rpc error: code = NotFound desc = could not find container \"267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334\": container with ID starting with 267d02b9146dffc7cad58672b7cd2b9a40b6d4b0aa87c46241972093115b2334 not found: ID does not exist" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.638754 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89ttp\" (UniqueName: \"kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp\") pod \"13080004-259f-432b-b14a-664d5d0318fa\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.638844 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert\") pod \"13080004-259f-432b-b14a-664d5d0318fa\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.638881 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config\") pod \"13080004-259f-432b-b14a-664d5d0318fa\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.638918 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca\") pod \"13080004-259f-432b-b14a-664d5d0318fa\" (UID: \"13080004-259f-432b-b14a-664d5d0318fa\") " Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.639793 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca" (OuterVolumeSpecName: "client-ca") pod "13080004-259f-432b-b14a-664d5d0318fa" (UID: "13080004-259f-432b-b14a-664d5d0318fa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.639809 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config" (OuterVolumeSpecName: "config") pod "13080004-259f-432b-b14a-664d5d0318fa" (UID: "13080004-259f-432b-b14a-664d5d0318fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.644852 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "13080004-259f-432b-b14a-664d5d0318fa" (UID: "13080004-259f-432b-b14a-664d5d0318fa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.644920 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp" (OuterVolumeSpecName: "kube-api-access-89ttp") pod "13080004-259f-432b-b14a-664d5d0318fa" (UID: "13080004-259f-432b-b14a-664d5d0318fa"). InnerVolumeSpecName "kube-api-access-89ttp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.740246 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/13080004-259f-432b-b14a-664d5d0318fa-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.740295 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.740306 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/13080004-259f-432b-b14a-664d5d0318fa-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:22 crc kubenswrapper[4768]: I1203 16:24:22.740317 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89ttp\" (UniqueName: \"kubernetes.io/projected/13080004-259f-432b-b14a-664d5d0318fa-kube-api-access-89ttp\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.167839 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9"] Dec 03 16:24:23 crc kubenswrapper[4768]: E1203 16:24:23.168413 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13080004-259f-432b-b14a-664d5d0318fa" containerName="route-controller-manager" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.168430 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="13080004-259f-432b-b14a-664d5d0318fa" containerName="route-controller-manager" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.168563 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="13080004-259f-432b-b14a-664d5d0318fa" containerName="route-controller-manager" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.169025 4768 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.172640 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.172709 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.172640 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.173020 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.173165 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.173419 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.181174 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9"] Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.348339 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-config\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.348380 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-serving-cert\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.348425 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhr8n\" (UniqueName: \"kubernetes.io/projected/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-kube-api-access-hhr8n\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.348446 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-client-ca\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.450125 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-client-ca\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.450212 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-config\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.450235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-serving-cert\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.450272 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhr8n\" (UniqueName: \"kubernetes.io/projected/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-kube-api-access-hhr8n\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.451680 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-client-ca\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.452363 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-config\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.456404 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-serving-cert\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: \"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.474840 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.485447 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.498628 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhr8n\" (UniqueName: \"kubernetes.io/projected/a5be08f5-446d-4f1b-a81f-6ae591d83ec9-kube-api-access-hhr8n\") pod \"route-controller-manager-77b68489c8-s4qt9\" (UID: 
\"a5be08f5-446d-4f1b-a81f-6ae591d83ec9\") " pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.981660 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 03 16:24:23 crc kubenswrapper[4768]: I1203 16:24:23.990343 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.410701 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9"] Dec 03 16:24:24 crc kubenswrapper[4768]: W1203 16:24:24.419625 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5be08f5_446d_4f1b_a81f_6ae591d83ec9.slice/crio-bacef4b9380518c5abca1c87b022fdce9e94c8041a7eb27342fbe9dae3a98f3e WatchSource:0}: Error finding container bacef4b9380518c5abca1c87b022fdce9e94c8041a7eb27342fbe9dae3a98f3e: Status 404 returned error can't find the container with id bacef4b9380518c5abca1c87b022fdce9e94c8041a7eb27342fbe9dae3a98f3e Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.607741 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" event={"ID":"a5be08f5-446d-4f1b-a81f-6ae591d83ec9","Type":"ContainerStarted","Data":"917a3b0222625e7732f1714954f33be7459295b7d8cfdfbaa3da8c26a519c19b"} Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.607781 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" event={"ID":"a5be08f5-446d-4f1b-a81f-6ae591d83ec9","Type":"ContainerStarted","Data":"bacef4b9380518c5abca1c87b022fdce9e94c8041a7eb27342fbe9dae3a98f3e"} Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.607897 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.610179 4768 patch_prober.go:28] interesting pod/route-controller-manager-77b68489c8-s4qt9 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" start-of-body= Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.610240 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" podUID="a5be08f5-446d-4f1b-a81f-6ae591d83ec9" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.65:8443/healthz\": dial tcp 10.217.0.65:8443: connect: connection refused" Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.610910 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerStarted","Data":"49c0099eecd6eb30db379a06867a122cb67adffc90da4702f4fe6b6a4216e177"} Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.624313 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" podStartSLOduration=2.624293399 podStartE2EDuration="2.624293399s" podCreationTimestamp="2025-12-03 16:24:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:24.622764355 +0000 UTC m=+361.542100798" watchObservedRunningTime="2025-12-03 16:24:24.624293399 +0000 UTC m=+361.543629822" Dec 03 16:24:24 crc kubenswrapper[4768]: I1203 16:24:24.637922 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-45zmq" podStartSLOduration=2.994602391 podStartE2EDuration="5.637899867s" podCreationTimestamp="2025-12-03 16:24:19 +0000 UTC" firstStartedPulling="2025-12-03 16:24:20.55037996 +0000 UTC m=+357.469716383" lastFinishedPulling="2025-12-03 16:24:23.193677436 +0000 UTC m=+360.113013859" observedRunningTime="2025-12-03 16:24:24.637033012 +0000 UTC m=+361.556369445" watchObservedRunningTime="2025-12-03 16:24:24.637899867 +0000 UTC m=+361.557236290" Dec 03 16:24:25 crc kubenswrapper[4768]: I1203 16:24:25.619763 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-77b68489c8-s4qt9" Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.027971 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.028031 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.058936 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.060020 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.102746 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:26 crc kubenswrapper[4768]: I1203 16:24:26.668254 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pwrl8" Dec 03 16:24:27 crc kubenswrapper[4768]: I1203 16:24:27.438957 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:27 crc kubenswrapper[4768]: I1203 16:24:27.440940 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:27 crc kubenswrapper[4768]: I1203 16:24:27.492521 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:27 crc kubenswrapper[4768]: I1203 16:24:27.665113 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-q4gkq" Dec 03 16:24:28 crc kubenswrapper[4768]: I1203 16:24:28.432976 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:28 crc kubenswrapper[4768]: I1203 16:24:28.433276 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:28 crc kubenswrapper[4768]: I1203 16:24:28.481867 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:28 crc kubenswrapper[4768]: I1203 16:24:28.664995 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-z8vgf" Dec 03 16:24:29 crc kubenswrapper[4768]: I1203 16:24:29.866865 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:29 crc kubenswrapper[4768]: I1203 16:24:29.866952 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:29 crc kubenswrapper[4768]: I1203 16:24:29.923187 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:30 crc kubenswrapper[4768]: I1203 16:24:30.682295 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-45zmq" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.741093 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4zst4"] Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.742346 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.775654 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4zst4"] Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.911485 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgqfd\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-kube-api-access-qgqfd\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.911903 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.912103 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/04c4198c-3300-4863-8b02-e999d9564f0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.912308 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-registry-certificates\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.912505 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-bound-sa-token\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.912754 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/04c4198c-3300-4863-8b02-e999d9564f0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.912974 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-registry-tls\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.913194 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-trusted-ca\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:41 crc kubenswrapper[4768]: I1203 16:24:41.935633 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.014786 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-trusted-ca\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.015931 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgqfd\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-kube-api-access-qgqfd\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.015998 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/04c4198c-3300-4863-8b02-e999d9564f0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.016028 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-registry-certificates\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.016070 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-bound-sa-token\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.016149 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/04c4198c-3300-4863-8b02-e999d9564f0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.016246 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-registry-tls\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.017082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/04c4198c-3300-4863-8b02-e999d9564f0f-ca-trust-extracted\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.017447 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-trusted-ca\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.018639 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/04c4198c-3300-4863-8b02-e999d9564f0f-registry-certificates\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.024830 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-registry-tls\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.026778 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/04c4198c-3300-4863-8b02-e999d9564f0f-installation-pull-secrets\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.032706 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.032999 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" podUID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" containerName="controller-manager" containerID="cri-o://1bbc8fc5c42750f9bfb55ffef09beb13932d041145c06c831c8eba7744882295" gracePeriod=30 Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.049401 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgqfd\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-kube-api-access-qgqfd\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.061916 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/04c4198c-3300-4863-8b02-e999d9564f0f-bound-sa-token\") pod \"image-registry-66df7c8f76-4zst4\" (UID: \"04c4198c-3300-4863-8b02-e999d9564f0f\") " pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 
16:24:42.070815 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.489780 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-4zst4"] Dec 03 16:24:42 crc kubenswrapper[4768]: W1203 16:24:42.492094 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04c4198c_3300_4863_8b02_e999d9564f0f.slice/crio-3f7e8ae266690e68f1217345d6a2bff97237f53d259a27d06b5b210538389ca9 WatchSource:0}: Error finding container 3f7e8ae266690e68f1217345d6a2bff97237f53d259a27d06b5b210538389ca9: Status 404 returned error can't find the container with id 3f7e8ae266690e68f1217345d6a2bff97237f53d259a27d06b5b210538389ca9 Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.714874 4768 generic.go:334] "Generic (PLEG): container finished" podID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" containerID="1bbc8fc5c42750f9bfb55ffef09beb13932d041145c06c831c8eba7744882295" exitCode=0 Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.714971 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" event={"ID":"4e6cf345-5855-403f-98f2-f242e2a4c5c1","Type":"ContainerDied","Data":"1bbc8fc5c42750f9bfb55ffef09beb13932d041145c06c831c8eba7744882295"} Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.731031 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" event={"ID":"04c4198c-3300-4863-8b02-e999d9564f0f","Type":"ContainerStarted","Data":"3f881c8eead701667cd7c61062b02f9e82271af29febd96783571cab254bd15b"} Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.731099 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" event={"ID":"04c4198c-3300-4863-8b02-e999d9564f0f","Type":"ContainerStarted","Data":"3f7e8ae266690e68f1217345d6a2bff97237f53d259a27d06b5b210538389ca9"} Dec 03 16:24:42 crc kubenswrapper[4768]: I1203 16:24:42.950741 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031021 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert\") pod \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031063 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles\") pod \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031085 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config\") pod \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031173 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca\") pod \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031212 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc662\" (UniqueName: \"kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662\") pod \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\" (UID: \"4e6cf345-5855-403f-98f2-f242e2a4c5c1\") " Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031943 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca" (OuterVolumeSpecName: "client-ca") pod "4e6cf345-5855-403f-98f2-f242e2a4c5c1" (UID: "4e6cf345-5855-403f-98f2-f242e2a4c5c1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.031998 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config" (OuterVolumeSpecName: "config") pod "4e6cf345-5855-403f-98f2-f242e2a4c5c1" (UID: "4e6cf345-5855-403f-98f2-f242e2a4c5c1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.032561 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4e6cf345-5855-403f-98f2-f242e2a4c5c1" (UID: "4e6cf345-5855-403f-98f2-f242e2a4c5c1"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.036421 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662" (OuterVolumeSpecName: "kube-api-access-pc662") pod "4e6cf345-5855-403f-98f2-f242e2a4c5c1" (UID: "4e6cf345-5855-403f-98f2-f242e2a4c5c1"). InnerVolumeSpecName "kube-api-access-pc662". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.036567 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4e6cf345-5855-403f-98f2-f242e2a4c5c1" (UID: "4e6cf345-5855-403f-98f2-f242e2a4c5c1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.132201 4768 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-client-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.132259 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc662\" (UniqueName: \"kubernetes.io/projected/4e6cf345-5855-403f-98f2-f242e2a4c5c1-kube-api-access-pc662\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.132269 4768 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4e6cf345-5855-403f-98f2-f242e2a4c5c1-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.132279 4768 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.132288 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e6cf345-5855-403f-98f2-f242e2a4c5c1-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.187328 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c4c498796-6ktb9"] Dec 03 16:24:43 crc kubenswrapper[4768]: E1203 16:24:43.187523 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" containerName="controller-manager" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.187538 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" containerName="controller-manager" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.187650 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" containerName="controller-manager" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.187969 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.200898 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c4c498796-6ktb9"] Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.336903 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-client-ca\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.336980 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-proxy-ca-bundles\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.337013 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-config\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.337038 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-serving-cert\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.337076 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvb9g\" (UniqueName: \"kubernetes.io/projected/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-kube-api-access-nvb9g\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.438234 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-client-ca\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.438330 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-proxy-ca-bundles\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.438367 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-config\") pod 
\"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.438393 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-serving-cert\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.438433 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvb9g\" (UniqueName: \"kubernetes.io/projected/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-kube-api-access-nvb9g\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.439897 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-client-ca\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.440071 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-config\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.440412 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-proxy-ca-bundles\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.443762 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-serving-cert\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.469152 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvb9g\" (UniqueName: \"kubernetes.io/projected/ff1dba7f-058c-4bdb-9209-c4aa49da33ff-kube-api-access-nvb9g\") pod \"controller-manager-c4c498796-6ktb9\" (UID: \"ff1dba7f-058c-4bdb-9209-c4aa49da33ff\") " pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.503902 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.739352 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" event={"ID":"4e6cf345-5855-403f-98f2-f242e2a4c5c1","Type":"ContainerDied","Data":"8aa54e57e58d2aa1bbef664489715340c0b44a3c3d9eb5125b437e07d8a27a15"} Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.739765 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.739433 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c686f9869-h88g6" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.739793 4768 scope.go:117] "RemoveContainer" containerID="1bbc8fc5c42750f9bfb55ffef09beb13932d041145c06c831c8eba7744882295" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.771926 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" podStartSLOduration=2.771911916 podStartE2EDuration="2.771911916s" podCreationTimestamp="2025-12-03 16:24:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:43.768969043 +0000 UTC m=+380.688305486" watchObservedRunningTime="2025-12-03 16:24:43.771911916 +0000 UTC m=+380.691248339" Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.784134 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.787180 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5c686f9869-h88g6"] Dec 03 16:24:43 crc kubenswrapper[4768]: I1203 16:24:43.941455 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c4c498796-6ktb9"] Dec 03 16:24:43 crc kubenswrapper[4768]: W1203 16:24:43.949143 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff1dba7f_058c_4bdb_9209_c4aa49da33ff.slice/crio-5dd717569bd3320ec5c32efe9fc36664e1c5b1cde06f755cb763cc52c0076b2f WatchSource:0}: Error finding container 5dd717569bd3320ec5c32efe9fc36664e1c5b1cde06f755cb763cc52c0076b2f: Status 404 returned error can't find the container with id 5dd717569bd3320ec5c32efe9fc36664e1c5b1cde06f755cb763cc52c0076b2f Dec 03 16:24:44 crc kubenswrapper[4768]: I1203 16:24:44.748443 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" event={"ID":"ff1dba7f-058c-4bdb-9209-c4aa49da33ff","Type":"ContainerStarted","Data":"5dd717569bd3320ec5c32efe9fc36664e1c5b1cde06f755cb763cc52c0076b2f"} Dec 03 16:24:45 crc kubenswrapper[4768]: I1203 16:24:45.541040 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e6cf345-5855-403f-98f2-f242e2a4c5c1" path="/var/lib/kubelet/pods/4e6cf345-5855-403f-98f2-f242e2a4c5c1/volumes" Dec 03 16:24:45 crc kubenswrapper[4768]: I1203 16:24:45.757613 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" 
event={"ID":"ff1dba7f-058c-4bdb-9209-c4aa49da33ff","Type":"ContainerStarted","Data":"fb1c62f68306309f4e4973f35a70215df73baa2db307e7c896157124225dd376"} Dec 03 16:24:45 crc kubenswrapper[4768]: I1203 16:24:45.757939 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:45 crc kubenswrapper[4768]: I1203 16:24:45.764370 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" Dec 03 16:24:45 crc kubenswrapper[4768]: I1203 16:24:45.786010 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c4c498796-6ktb9" podStartSLOduration=3.785982723 podStartE2EDuration="3.785982723s" podCreationTimestamp="2025-12-03 16:24:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:24:45.780062415 +0000 UTC m=+382.699398878" watchObservedRunningTime="2025-12-03 16:24:45.785982723 +0000 UTC m=+382.705319186" Dec 03 16:24:53 crc kubenswrapper[4768]: I1203 16:24:53.927529 4768 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod13080004-259f-432b-b14a-664d5d0318fa"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod13080004-259f-432b-b14a-664d5d0318fa] : Timed out while waiting for systemd to remove kubepods-burstable-pod13080004_259f_432b_b14a_664d5d0318fa.slice" Dec 03 16:24:53 crc kubenswrapper[4768]: E1203 16:24:53.928010 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable pod13080004-259f-432b-b14a-664d5d0318fa] : unable to destroy cgroup paths for cgroup [kubepods burstable pod13080004-259f-432b-b14a-664d5d0318fa] : Timed out while waiting for systemd to remove kubepods-burstable-pod13080004_259f_432b_b14a_664d5d0318fa.slice" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" podUID="13080004-259f-432b-b14a-664d5d0318fa" Dec 03 16:24:54 crc kubenswrapper[4768]: I1203 16:24:54.828430 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb" Dec 03 16:24:54 crc kubenswrapper[4768]: I1203 16:24:54.860688 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:54 crc kubenswrapper[4768]: I1203 16:24:54.869958 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6c5dd59bc9-rf9nb"] Dec 03 16:24:55 crc kubenswrapper[4768]: I1203 16:24:55.541369 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13080004-259f-432b-b14a-664d5d0318fa" path="/var/lib/kubelet/pods/13080004-259f-432b-b14a-664d5d0318fa/volumes" Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.028198 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.028282 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.028356 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.029242 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.029346 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6" gracePeriod=600 Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.839693 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6" exitCode=0 Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.839792 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6"} Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.840038 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961"} Dec 03 16:24:56 crc kubenswrapper[4768]: I1203 16:24:56.840064 4768 scope.go:117] 
"RemoveContainer" containerID="8cb9ef65df89af3971ca461eea8805f1f0e5deb330e04db950769370f5bd1099" Dec 03 16:25:02 crc kubenswrapper[4768]: I1203 16:25:02.076228 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-4zst4" Dec 03 16:25:02 crc kubenswrapper[4768]: I1203 16:25:02.124215 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.168403 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" podUID="028c92ba-255e-47a9-9404-3ecbcb709029" containerName="registry" containerID="cri-o://9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f" gracePeriod=30 Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.758525 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.902475 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.902784 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.902846 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.902923 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htbqf\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.903003 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.903055 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.903190 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" 
(UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.903233 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token\") pod \"028c92ba-255e-47a9-9404-3ecbcb709029\" (UID: \"028c92ba-255e-47a9-9404-3ecbcb709029\") " Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.904404 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.904634 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.911423 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.911871 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.912725 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf" (OuterVolumeSpecName: "kube-api-access-htbqf") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "kube-api-access-htbqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.915053 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.934486 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:25:27 crc kubenswrapper[4768]: I1203 16:25:27.944717 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "028c92ba-255e-47a9-9404-3ecbcb709029" (UID: "028c92ba-255e-47a9-9404-3ecbcb709029"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004750 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htbqf\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-kube-api-access-htbqf\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004804 4768 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/028c92ba-255e-47a9-9404-3ecbcb709029-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004824 4768 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004844 4768 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/028c92ba-255e-47a9-9404-3ecbcb709029-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004862 4768 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/028c92ba-255e-47a9-9404-3ecbcb709029-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004878 4768 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.004894 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/028c92ba-255e-47a9-9404-3ecbcb709029-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.037837 4768 generic.go:334] "Generic (PLEG): container finished" podID="028c92ba-255e-47a9-9404-3ecbcb709029" containerID="9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f" exitCode=0 Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.037893 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" event={"ID":"028c92ba-255e-47a9-9404-3ecbcb709029","Type":"ContainerDied","Data":"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f"} Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.037934 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" event={"ID":"028c92ba-255e-47a9-9404-3ecbcb709029","Type":"ContainerDied","Data":"0d2cdbc339b59c184bfddfad1b3f7137c33d814781d672f07810b4dc6350b0b1"} Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.037938 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xmph5" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.037963 4768 scope.go:117] "RemoveContainer" containerID="9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.060786 4768 scope.go:117] "RemoveContainer" containerID="9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f" Dec 03 16:25:28 crc kubenswrapper[4768]: E1203 16:25:28.061303 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f\": container with ID starting with 9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f not found: ID does not exist" containerID="9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.061341 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f"} err="failed to get container status \"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f\": rpc error: code = NotFound desc = could not find container \"9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f\": container with ID starting with 9dfa5ad175fb86cd2151057e1a25f49bce99d6d0f3acec12054e3b263d8d082f not found: ID does not exist" Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.084722 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:25:28 crc kubenswrapper[4768]: I1203 16:25:28.094945 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xmph5"] Dec 03 16:25:29 crc kubenswrapper[4768]: I1203 16:25:29.544362 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="028c92ba-255e-47a9-9404-3ecbcb709029" path="/var/lib/kubelet/pods/028c92ba-255e-47a9-9404-3ecbcb709029/volumes" Dec 03 16:26:56 crc kubenswrapper[4768]: I1203 16:26:56.028476 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:26:56 crc kubenswrapper[4768]: I1203 16:26:56.029151 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:27:26 crc kubenswrapper[4768]: I1203 16:27:26.028574 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:27:26 crc kubenswrapper[4768]: I1203 16:27:26.029216 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:27:56 crc kubenswrapper[4768]: I1203 16:27:56.027863 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:27:56 crc kubenswrapper[4768]: I1203 16:27:56.028669 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:27:56 crc kubenswrapper[4768]: I1203 16:27:56.028734 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:27:56 crc kubenswrapper[4768]: I1203 16:27:56.029415 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:27:56 crc kubenswrapper[4768]: I1203 16:27:56.029508 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961" gracePeriod=600 Dec 03 16:27:57 crc kubenswrapper[4768]: I1203 16:27:57.312538 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961" exitCode=0 Dec 03 16:27:57 crc kubenswrapper[4768]: I1203 16:27:57.312690 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961"} Dec 03 16:27:57 crc kubenswrapper[4768]: I1203 16:27:57.313440 4768 scope.go:117] "RemoveContainer" containerID="31ca24909cb1fcc053dd6be7141f97fe9a5ad5cf7a5523c30ec1bf9e0da43ac6" Dec 03 16:27:58 crc kubenswrapper[4768]: I1203 16:27:58.350132 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32"} Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.197378 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn"] Dec 03 16:30:00 crc kubenswrapper[4768]: E1203 16:30:00.198448 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="028c92ba-255e-47a9-9404-3ecbcb709029" containerName="registry" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.198469 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="028c92ba-255e-47a9-9404-3ecbcb709029" containerName="registry" Dec 
03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.198756 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="028c92ba-255e-47a9-9404-3ecbcb709029" containerName="registry" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.199356 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.201944 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.203407 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.209713 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn"] Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.345350 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.345502 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.345670 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4qqs\" (UniqueName: \"kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.447357 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4qqs\" (UniqueName: \"kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.447535 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.447633 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.449305 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.457538 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.470660 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4qqs\" (UniqueName: \"kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs\") pod \"collect-profiles-29412990-22drn\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.529149 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:00 crc kubenswrapper[4768]: I1203 16:30:00.973828 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn"] Dec 03 16:30:01 crc kubenswrapper[4768]: I1203 16:30:01.441499 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" event={"ID":"b7718e65-4028-4ef0-ab6c-06de3b0cab04","Type":"ContainerStarted","Data":"dcb266857adde0975c5e865252d13c25fb28d7a56e4597347be77545e526ab7a"} Dec 03 16:30:01 crc kubenswrapper[4768]: I1203 16:30:01.441570 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" event={"ID":"b7718e65-4028-4ef0-ab6c-06de3b0cab04","Type":"ContainerStarted","Data":"a53038daa1e0eaa54ebe9c19fb7dfa2d3851d36d1cbb37784361b15217be9b2d"} Dec 03 16:30:01 crc kubenswrapper[4768]: I1203 16:30:01.482309 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" podStartSLOduration=1.482274528 podStartE2EDuration="1.482274528s" podCreationTimestamp="2025-12-03 16:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:30:01.472301986 +0000 UTC m=+698.391638489" watchObservedRunningTime="2025-12-03 16:30:01.482274528 +0000 UTC m=+698.401611001" Dec 03 16:30:02 crc kubenswrapper[4768]: I1203 16:30:02.450478 4768 generic.go:334] "Generic (PLEG): container finished" podID="b7718e65-4028-4ef0-ab6c-06de3b0cab04" containerID="dcb266857adde0975c5e865252d13c25fb28d7a56e4597347be77545e526ab7a" exitCode=0 Dec 03 16:30:02 crc kubenswrapper[4768]: I1203 16:30:02.450650 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" 
event={"ID":"b7718e65-4028-4ef0-ab6c-06de3b0cab04","Type":"ContainerDied","Data":"dcb266857adde0975c5e865252d13c25fb28d7a56e4597347be77545e526ab7a"} Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.673776 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.799252 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume\") pod \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.799391 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4qqs\" (UniqueName: \"kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs\") pod \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.799519 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume\") pod \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\" (UID: \"b7718e65-4028-4ef0-ab6c-06de3b0cab04\") " Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.800405 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume" (OuterVolumeSpecName: "config-volume") pod "b7718e65-4028-4ef0-ab6c-06de3b0cab04" (UID: "b7718e65-4028-4ef0-ab6c-06de3b0cab04"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.806590 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs" (OuterVolumeSpecName: "kube-api-access-l4qqs") pod "b7718e65-4028-4ef0-ab6c-06de3b0cab04" (UID: "b7718e65-4028-4ef0-ab6c-06de3b0cab04"). InnerVolumeSpecName "kube-api-access-l4qqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.806838 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b7718e65-4028-4ef0-ab6c-06de3b0cab04" (UID: "b7718e65-4028-4ef0-ab6c-06de3b0cab04"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.900535 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4qqs\" (UniqueName: \"kubernetes.io/projected/b7718e65-4028-4ef0-ab6c-06de3b0cab04-kube-api-access-l4qqs\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.900566 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b7718e65-4028-4ef0-ab6c-06de3b0cab04-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:03 crc kubenswrapper[4768]: I1203 16:30:03.900578 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b7718e65-4028-4ef0-ab6c-06de3b0cab04-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:04 crc kubenswrapper[4768]: I1203 16:30:04.464017 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" event={"ID":"b7718e65-4028-4ef0-ab6c-06de3b0cab04","Type":"ContainerDied","Data":"a53038daa1e0eaa54ebe9c19fb7dfa2d3851d36d1cbb37784361b15217be9b2d"} Dec 03 16:30:04 crc kubenswrapper[4768]: I1203 16:30:04.464367 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a53038daa1e0eaa54ebe9c19fb7dfa2d3851d36d1cbb37784361b15217be9b2d" Dec 03 16:30:04 crc kubenswrapper[4768]: I1203 16:30:04.464080 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn" Dec 03 16:30:26 crc kubenswrapper[4768]: I1203 16:30:26.028780 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:30:26 crc kubenswrapper[4768]: I1203 16:30:26.029485 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.426572 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb"] Dec 03 16:30:53 crc kubenswrapper[4768]: E1203 16:30:53.427292 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7718e65-4028-4ef0-ab6c-06de3b0cab04" containerName="collect-profiles" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.427304 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7718e65-4028-4ef0-ab6c-06de3b0cab04" containerName="collect-profiles" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.427393 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7718e65-4028-4ef0-ab6c-06de3b0cab04" containerName="collect-profiles" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.428068 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.437428 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.445237 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb"] Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.525626 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.525703 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.525733 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nkj6\" (UniqueName: \"kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.627108 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.627301 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nkj6\" (UniqueName: \"kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.627342 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.627559 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.627717 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.658499 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nkj6\" (UniqueName: \"kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:53 crc kubenswrapper[4768]: I1203 16:30:53.752442 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:54 crc kubenswrapper[4768]: I1203 16:30:54.168043 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb"] Dec 03 16:30:54 crc kubenswrapper[4768]: I1203 16:30:54.829348 4768 generic.go:334] "Generic (PLEG): container finished" podID="c013a14e-13e8-4979-95b9-948abf069cdd" containerID="32ea6c81bcb41f19ee83ae56375d2b8257af408b419bca374a831687fe1e5f6b" exitCode=0 Dec 03 16:30:54 crc kubenswrapper[4768]: I1203 16:30:54.829431 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" event={"ID":"c013a14e-13e8-4979-95b9-948abf069cdd","Type":"ContainerDied","Data":"32ea6c81bcb41f19ee83ae56375d2b8257af408b419bca374a831687fe1e5f6b"} Dec 03 16:30:54 crc kubenswrapper[4768]: I1203 16:30:54.829653 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" event={"ID":"c013a14e-13e8-4979-95b9-948abf069cdd","Type":"ContainerStarted","Data":"41dc81bc5c8b5353fae6c6821a7de318c75275016f6be6c2b01ef81f924a8930"} Dec 03 16:30:54 crc kubenswrapper[4768]: I1203 16:30:54.832045 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:30:56 crc kubenswrapper[4768]: I1203 16:30:56.028060 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:30:56 crc kubenswrapper[4768]: I1203 16:30:56.028489 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:30:56 
crc kubenswrapper[4768]: I1203 16:30:56.840697 4768 generic.go:334] "Generic (PLEG): container finished" podID="c013a14e-13e8-4979-95b9-948abf069cdd" containerID="07c03f6ebe9a8e856e052f522fa64fc37561ac36573f2ef1bccee7f48dc0f6b7" exitCode=0 Dec 03 16:30:56 crc kubenswrapper[4768]: I1203 16:30:56.840771 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" event={"ID":"c013a14e-13e8-4979-95b9-948abf069cdd","Type":"ContainerDied","Data":"07c03f6ebe9a8e856e052f522fa64fc37561ac36573f2ef1bccee7f48dc0f6b7"} Dec 03 16:30:57 crc kubenswrapper[4768]: I1203 16:30:57.850785 4768 generic.go:334] "Generic (PLEG): container finished" podID="c013a14e-13e8-4979-95b9-948abf069cdd" containerID="b5ff48bf1d8129705e752f70d3f95beaeae15dcba9ad2d445c18b7525f32a328" exitCode=0 Dec 03 16:30:57 crc kubenswrapper[4768]: I1203 16:30:57.850877 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" event={"ID":"c013a14e-13e8-4979-95b9-948abf069cdd","Type":"ContainerDied","Data":"b5ff48bf1d8129705e752f70d3f95beaeae15dcba9ad2d445c18b7525f32a328"} Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.107187 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.205939 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nkj6\" (UniqueName: \"kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6\") pod \"c013a14e-13e8-4979-95b9-948abf069cdd\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.206010 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util\") pod \"c013a14e-13e8-4979-95b9-948abf069cdd\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.206107 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle\") pod \"c013a14e-13e8-4979-95b9-948abf069cdd\" (UID: \"c013a14e-13e8-4979-95b9-948abf069cdd\") " Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.209167 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle" (OuterVolumeSpecName: "bundle") pod "c013a14e-13e8-4979-95b9-948abf069cdd" (UID: "c013a14e-13e8-4979-95b9-948abf069cdd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.212205 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6" (OuterVolumeSpecName: "kube-api-access-5nkj6") pod "c013a14e-13e8-4979-95b9-948abf069cdd" (UID: "c013a14e-13e8-4979-95b9-948abf069cdd"). InnerVolumeSpecName "kube-api-access-5nkj6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.224903 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util" (OuterVolumeSpecName: "util") pod "c013a14e-13e8-4979-95b9-948abf069cdd" (UID: "c013a14e-13e8-4979-95b9-948abf069cdd"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.307556 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nkj6\" (UniqueName: \"kubernetes.io/projected/c013a14e-13e8-4979-95b9-948abf069cdd-kube-api-access-5nkj6\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.307623 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.307635 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c013a14e-13e8-4979-95b9-948abf069cdd-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.877291 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" event={"ID":"c013a14e-13e8-4979-95b9-948abf069cdd","Type":"ContainerDied","Data":"41dc81bc5c8b5353fae6c6821a7de318c75275016f6be6c2b01ef81f924a8930"} Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.877795 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41dc81bc5c8b5353fae6c6821a7de318c75275016f6be6c2b01ef81f924a8930" Dec 03 16:30:59 crc kubenswrapper[4768]: I1203 16:30:59.877833 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.545962 4768 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676142 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5z68m"] Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676486 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-controller" containerID="cri-o://44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676539 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="northd" containerID="cri-o://d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676733 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-acl-logging" containerID="cri-o://9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676702 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676719 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="nbdb" containerID="cri-o://f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.676702 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-node" containerID="cri-o://f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.677264 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="sbdb" containerID="cri-o://20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.708763 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" containerID="cri-o://bb808d15e9e16ea9966c9c40bc58f993c43fa774e04642b783f3888bb420c016" gracePeriod=30 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.909096 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/2.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.910374 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/1.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.910426 4768 generic.go:334] "Generic (PLEG): container finished" podID="d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9" containerID="3a6bdd46568d340f1950afc6f2b9373a8d008c82e8121d47a8c36dd53954e582" exitCode=2 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.910505 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerDied","Data":"3a6bdd46568d340f1950afc6f2b9373a8d008c82e8121d47a8c36dd53954e582"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.910552 4768 scope.go:117] "RemoveContainer" containerID="eb222da2bdb9eb995db84df4b9344a78590543f6543ab83ebb18d354f0adc709" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.911175 4768 scope.go:117] "RemoveContainer" containerID="3a6bdd46568d340f1950afc6f2b9373a8d008c82e8121d47a8c36dd53954e582" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.943499 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/3.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.945461 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-acl-logging/0.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.945892 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-controller/0.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947011 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="bb808d15e9e16ea9966c9c40bc58f993c43fa774e04642b783f3888bb420c016" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947036 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947044 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947052 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947059 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947066 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6" exitCode=0 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947073 4768 generic.go:334] 
"Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a" exitCode=143 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947079 4768 generic.go:334] "Generic (PLEG): container finished" podID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerID="44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd" exitCode=143 Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947061 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"bb808d15e9e16ea9966c9c40bc58f993c43fa774e04642b783f3888bb420c016"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947426 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947486 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947504 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947519 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947532 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947544 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.947560 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd"} Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.977765 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovnkube-controller/3.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.979669 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-acl-logging/0.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.980011 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-controller/0.log" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.980312 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:31:04 crc kubenswrapper[4768]: I1203 16:31:04.987661 4768 scope.go:117] "RemoveContainer" containerID="3a0c32e5dd3334b01b9215cc0f3dc15c8e4bf38832be70f6459ba9444edab50e" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.056813 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgcfq"] Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057106 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="pull" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057121 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="pull" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057135 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="extract" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057141 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="extract" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057150 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="util" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057158 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="util" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057164 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-node" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057171 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-node" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057184 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-acl-logging" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057190 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-acl-logging" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057200 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057207 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057215 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kubecfg-setup" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057220 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kubecfg-setup" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057247 4768 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057253 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057260 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="northd" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057265 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="northd" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057274 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057281 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057287 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057293 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057303 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="nbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057309 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="nbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057322 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="sbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057331 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="sbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057347 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057352 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057360 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057366 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057467 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="sbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057479 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-acl-logging" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057488 4768 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-node" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057496 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057502 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="northd" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057510 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovn-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057519 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="nbdb" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057527 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057533 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="c013a14e-13e8-4979-95b9-948abf069cdd" containerName="extract" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057540 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="kube-rbac-proxy-ovn-metrics" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057551 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: E1203 16:31:05.057666 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057673 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057762 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.057772 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" containerName="ovnkube-controller" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.059767 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084683 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084746 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084767 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084785 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084806 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084825 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084845 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084863 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084885 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084900 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084934 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084949 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084974 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.084990 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085016 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085037 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085055 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrwrt\" (UniqueName: \"kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085084 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085103 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.085129 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch\") pod \"29dd67f1-08a6-43ed-840d-cf4b166d5664\" (UID: \"29dd67f1-08a6-43ed-840d-cf4b166d5664\") " Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.086733 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087160 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087196 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash" (OuterVolumeSpecName: "host-slash") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087216 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087234 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087251 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087270 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087285 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087303 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.087321 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.089149 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket" (OuterVolumeSpecName: "log-socket") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.089245 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.089707 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.089747 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.090108 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.090206 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log" (OuterVolumeSpecName: "node-log") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.090506 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.098568 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt" (OuterVolumeSpecName: "kube-api-access-hrwrt") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "kube-api-access-hrwrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.103857 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.106533 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "29dd67f1-08a6-43ed-840d-cf4b166d5664" (UID: "29dd67f1-08a6-43ed-840d-cf4b166d5664"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186518 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-netns\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186568 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-node-log\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186585 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-script-lib\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186614 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186632 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-var-lib-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186655 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-log-socket\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186669 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186683 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-config\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186704 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-systemd-units\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186729 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186772 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-systemd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186795 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tvj8\" (UniqueName: \"kubernetes.io/projected/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-kube-api-access-5tvj8\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186817 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-bin\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186847 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-env-overrides\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186867 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-netd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186885 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-kubelet\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186909 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-etc-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186928 4768 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-slash\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186946 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovn-node-metrics-cert\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186962 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-ovn\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.186996 4768 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187008 4768 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187017 4768 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-slash\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187025 4768 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187035 4768 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187043 4768 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187052 4768 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187060 4768 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187069 4768 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-var-lib-cni-networks-ovn-kubernetes\") on node 
\"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187078 4768 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187086 4768 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187094 4768 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187103 4768 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187111 4768 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-log-socket\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187120 4768 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187129 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrwrt\" (UniqueName: \"kubernetes.io/projected/29dd67f1-08a6-43ed-840d-cf4b166d5664-kube-api-access-hrwrt\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187138 4768 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/29dd67f1-08a6-43ed-840d-cf4b166d5664-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187146 4768 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-node-log\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187156 4768 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.187165 4768 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/29dd67f1-08a6-43ed-840d-cf4b166d5664-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288193 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288288 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" 
(UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-config\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288327 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-systemd-units\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288365 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288430 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-systemd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288462 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tvj8\" (UniqueName: \"kubernetes.io/projected/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-kube-api-access-5tvj8\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288499 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-bin\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288533 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-env-overrides\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288568 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-netd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288630 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-kubelet\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288679 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-etc-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288711 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovn-node-metrics-cert\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288740 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-slash\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288772 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-ovn\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288810 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-netns\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288840 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-script-lib\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288867 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-node-log\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288899 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288928 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-var-lib-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.288968 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-log-socket\") pod 
\"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.289084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-log-socket\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.289144 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.290767 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-config\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.290853 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-systemd-units\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.290900 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.290947 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-systemd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.291332 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-bin\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.291982 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-env-overrides\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.292053 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-cni-netd\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 
crc kubenswrapper[4768]: I1203 16:31:05.292097 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-kubelet\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.292137 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-etc-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.292978 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-run-ovn\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293096 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-slash\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293126 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-netns\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293161 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293188 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-node-log\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293217 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-var-lib-openvswitch\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.293650 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovnkube-script-lib\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.297910 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-ovn-node-metrics-cert\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.327505 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tvj8\" (UniqueName: \"kubernetes.io/projected/35bc04e1-ab8e-4e8a-be06-c4f6bb828baa-kube-api-access-5tvj8\") pod \"ovnkube-node-vgcfq\" (UID: \"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.376990 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.953517 4768 generic.go:334] "Generic (PLEG): container finished" podID="35bc04e1-ab8e-4e8a-be06-c4f6bb828baa" containerID="d44a441d35733515cd68217983a92a98ef60760e164bb29496205e5ee94f6980" exitCode=0 Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.953635 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerDied","Data":"d44a441d35733515cd68217983a92a98ef60760e164bb29496205e5ee94f6980"} Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.954170 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"16266a61f7bd0784baa86925d1a545259afacbb75458ede7eb43e8f82885fb43"} Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.958474 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-acl-logging/0.log" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.959010 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5z68m_29dd67f1-08a6-43ed-840d-cf4b166d5664/ovn-controller/0.log" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.959370 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" event={"ID":"29dd67f1-08a6-43ed-840d-cf4b166d5664","Type":"ContainerDied","Data":"3dabdd1faab942b3c53611e55a42e0a8a48f654bf99e4ad9077dd39de24b6584"} Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.959413 4768 scope.go:117] "RemoveContainer" containerID="bb808d15e9e16ea9966c9c40bc58f993c43fa774e04642b783f3888bb420c016" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.959459 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5z68m" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.961899 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8d4dq_d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9/kube-multus/2.log" Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.961974 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8d4dq" event={"ID":"d5ddfa20-4ba1-4257-8bc5-614fbeff6ee9","Type":"ContainerStarted","Data":"d3aefaa6bc35bc31f77868908d7a6d3822b4ce16793967ad8b238f7796f32936"} Dec 03 16:31:05 crc kubenswrapper[4768]: I1203 16:31:05.981942 4768 scope.go:117] "RemoveContainer" containerID="20db0aac20bdeedca9cc215446bef004471a795e9948a6226feb31fdc6b1b9df" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.013423 4768 scope.go:117] "RemoveContainer" containerID="f31d5b70375e3e16d6cd9ae78396d9133544e47749416d6cded012f4f0f32515" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.031898 4768 scope.go:117] "RemoveContainer" containerID="d9cff59265ebfb02e751dbfc0dc34fc54b97d951f827d49a46345ed0f89f9e96" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.053978 4768 scope.go:117] "RemoveContainer" containerID="8f2898839fb3b4944ea5caefc66beca51c4bd303c893e6773dc93b960e63029c" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.077407 4768 scope.go:117] "RemoveContainer" containerID="f58369ce39dd06aefa00b1d73fcb580352d80803ed2499baa7addc10cc531bb6" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.108190 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5z68m"] Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.109819 4768 scope.go:117] "RemoveContainer" containerID="9b3b62969c8f8df6c485d969b5b5971e2d9f792a22ec526970f61a5e4d4bee6a" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.112817 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5z68m"] Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.130866 4768 scope.go:117] "RemoveContainer" containerID="44946ca7a5424b91bce8139ec749f717ebdf7b9c2f33d4aedfa61d5a6c6000fd" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.167059 4768 scope.go:117] "RemoveContainer" containerID="e63959755148bce3809e9b015c9d1d5ee89a1b9b9dd1f3b5e1f27a9d121b3838" Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971110 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"d93d70db90d9b516d8d7b03b930e2a47e632f69978bd09949096ae918ba10b8f"} Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971609 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"37dc7c43a031ba8d9c5c9595a4840b6089b3dce6c59cea2d2ce8ed9f446079bf"} Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971621 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"e1ac466825993324ba951f1e7b6198c91bdc738bed9b7586ce98cac1d91933f3"} Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971630 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" 
event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"d7dd458ff69e84ebb47f585457b50a3c6a22cafc9525df8def46aea58cc99272"} Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971639 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"24a24028249fb4a589a20ae397bbef00dbe8937a4dbfc0094e64d8929be54cf7"} Dec 03 16:31:06 crc kubenswrapper[4768]: I1203 16:31:06.971651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"6e242b9747ad36ae61c39d06013a598938bd8f294d186b3e9d6f3e192552537b"} Dec 03 16:31:07 crc kubenswrapper[4768]: I1203 16:31:07.537925 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29dd67f1-08a6-43ed-840d-cf4b166d5664" path="/var/lib/kubelet/pods/29dd67f1-08a6-43ed-840d-cf4b166d5664/volumes" Dec 03 16:31:09 crc kubenswrapper[4768]: I1203 16:31:09.988274 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"066305037e80077336f36f9845c9fccd6d20795c9eb3f24ec19563a96b62aa2b"} Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.602475 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr"] Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.603144 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.605092 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-5rvsm" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.607775 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.608362 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.737950 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8"] Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.738529 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.740518 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-chn2h" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.740729 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.745086 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf"] Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.745874 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.772251 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2k65q\" (UniqueName: \"kubernetes.io/projected/48a3a666-c857-4b4f-858c-43bc2f9d6f08-kube-api-access-2k65q\") pod \"obo-prometheus-operator-668cf9dfbb-cgfsr\" (UID: \"48a3a666-c857-4b4f-858c-43bc2f9d6f08\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.847644 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7wglg"] Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.848274 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.850751 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-4m487" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.859351 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.873870 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2k65q\" (UniqueName: \"kubernetes.io/projected/48a3a666-c857-4b4f-858c-43bc2f9d6f08-kube-api-access-2k65q\") pod \"obo-prometheus-operator-668cf9dfbb-cgfsr\" (UID: \"48a3a666-c857-4b4f-858c-43bc2f9d6f08\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.873923 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: \"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.873970 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.874076 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.874129 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: 
\"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.894149 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2k65q\" (UniqueName: \"kubernetes.io/projected/48a3a666-c857-4b4f-858c-43bc2f9d6f08-kube-api-access-2k65q\") pod \"obo-prometheus-operator-668cf9dfbb-cgfsr\" (UID: \"48a3a666-c857-4b4f-858c-43bc2f9d6f08\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.920477 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: E1203 16:31:10.951345 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(14bc6b67325ad4063dd3b17cfae29d72fd2b6bdf45d409f19c309c17b447ec7f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:10 crc kubenswrapper[4768]: E1203 16:31:10.951423 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(14bc6b67325ad4063dd3b17cfae29d72fd2b6bdf45d409f19c309c17b447ec7f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: E1203 16:31:10.951447 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(14bc6b67325ad4063dd3b17cfae29d72fd2b6bdf45d409f19c309c17b447ec7f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:10 crc kubenswrapper[4768]: E1203 16:31:10.951494 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators(48a3a666-c857-4b4f-858c-43bc2f9d6f08)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators(48a3a666-c857-4b4f-858c-43bc2f9d6f08)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(14bc6b67325ad4063dd3b17cfae29d72fd2b6bdf45d409f19c309c17b447ec7f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" podUID="48a3a666-c857-4b4f-858c-43bc2f9d6f08" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.966140 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hrj6l"] Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.966989 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.968994 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-2jljq" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975243 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: \"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975291 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: \"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975338 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975361 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pvnl\" (UniqueName: \"kubernetes.io/projected/37340e28-1544-4f32-aed0-4c1d277cbf95-kube-api-access-9pvnl\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: \"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975914 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/37340e28-1544-4f32-aed0-4c1d277cbf95-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: \"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.975945 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.980619 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.982437 4768 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: \"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.982618 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/286fed40-67a8-4eab-9ca8-3c7609503df1-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8\" (UID: \"286fed40-67a8-4eab-9ca8-3c7609503df1\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:10 crc kubenswrapper[4768]: I1203 16:31:10.982756 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cff0ca88-1474-46c7-b046-cec35a7d2409-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf\" (UID: \"cff0ca88-1474-46c7-b046-cec35a7d2409\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.051981 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.063862 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.073569 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(e076c44b9fcf412913b0509122afdbedc7e4ed438df04ab3c3c0fca9c5cebb3a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.073651 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(e076c44b9fcf412913b0509122afdbedc7e4ed438df04ab3c3c0fca9c5cebb3a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.073674 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(e076c44b9fcf412913b0509122afdbedc7e4ed438df04ab3c3c0fca9c5cebb3a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.073727 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators(286fed40-67a8-4eab-9ca8-3c7609503df1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators(286fed40-67a8-4eab-9ca8-3c7609503df1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(e076c44b9fcf412913b0509122afdbedc7e4ed438df04ab3c3c0fca9c5cebb3a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" podUID="286fed40-67a8-4eab-9ca8-3c7609503df1" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.077245 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pvnl\" (UniqueName: \"kubernetes.io/projected/37340e28-1544-4f32-aed0-4c1d277cbf95-kube-api-access-9pvnl\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: \"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.077301 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cebba08d-4a33-458a-9893-e717d6359f90-openshift-service-ca\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.077334 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/37340e28-1544-4f32-aed0-4c1d277cbf95-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: \"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.077353 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5cq9\" (UniqueName: \"kubernetes.io/projected/cebba08d-4a33-458a-9893-e717d6359f90-kube-api-access-r5cq9\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.082430 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/37340e28-1544-4f32-aed0-4c1d277cbf95-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: \"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.099292 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pvnl\" (UniqueName: \"kubernetes.io/projected/37340e28-1544-4f32-aed0-4c1d277cbf95-kube-api-access-9pvnl\") pod \"observability-operator-d8bb48f5d-7wglg\" (UID: 
\"37340e28-1544-4f32-aed0-4c1d277cbf95\") " pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.102774 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a2dd19caa60dc2d7eb83dbbaea8852c5c39fc3bd2c531cd57f88dcea3b39ca02): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.102825 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a2dd19caa60dc2d7eb83dbbaea8852c5c39fc3bd2c531cd57f88dcea3b39ca02): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.102843 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a2dd19caa60dc2d7eb83dbbaea8852c5c39fc3bd2c531cd57f88dcea3b39ca02): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.102893 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators(cff0ca88-1474-46c7-b046-cec35a7d2409)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators(cff0ca88-1474-46c7-b046-cec35a7d2409)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a2dd19caa60dc2d7eb83dbbaea8852c5c39fc3bd2c531cd57f88dcea3b39ca02): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" podUID="cff0ca88-1474-46c7-b046-cec35a7d2409" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.162992 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.179139 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cebba08d-4a33-458a-9893-e717d6359f90-openshift-service-ca\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.179185 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5cq9\" (UniqueName: \"kubernetes.io/projected/cebba08d-4a33-458a-9893-e717d6359f90-kube-api-access-r5cq9\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.180057 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/cebba08d-4a33-458a-9893-e717d6359f90-openshift-service-ca\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.181094 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(eed16f977a47d9b1ed97ee2bcf4539b48799a3c53787841a64f2369c5206de7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.181139 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(eed16f977a47d9b1ed97ee2bcf4539b48799a3c53787841a64f2369c5206de7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.181158 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(eed16f977a47d9b1ed97ee2bcf4539b48799a3c53787841a64f2369c5206de7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.181219 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-7wglg_openshift-operators(37340e28-1544-4f32-aed0-4c1d277cbf95)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-7wglg_openshift-operators(37340e28-1544-4f32-aed0-4c1d277cbf95)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(eed16f977a47d9b1ed97ee2bcf4539b48799a3c53787841a64f2369c5206de7b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" podUID="37340e28-1544-4f32-aed0-4c1d277cbf95" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.195110 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5cq9\" (UniqueName: \"kubernetes.io/projected/cebba08d-4a33-458a-9893-e717d6359f90-kube-api-access-r5cq9\") pod \"perses-operator-5446b9c989-hrj6l\" (UID: \"cebba08d-4a33-458a-9893-e717d6359f90\") " pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: I1203 16:31:11.284579 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.302563 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(bc0088e285c423820d2bbc74264c7b68d7049169414114ff8a60f75d646b7590): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.302640 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(bc0088e285c423820d2bbc74264c7b68d7049169414114ff8a60f75d646b7590): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.302662 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(bc0088e285c423820d2bbc74264c7b68d7049169414114ff8a60f75d646b7590): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:11 crc kubenswrapper[4768]: E1203 16:31:11.302707 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-hrj6l_openshift-operators(cebba08d-4a33-458a-9893-e717d6359f90)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-hrj6l_openshift-operators(cebba08d-4a33-458a-9893-e717d6359f90)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(bc0088e285c423820d2bbc74264c7b68d7049169414114ff8a60f75d646b7590): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" podUID="cebba08d-4a33-458a-9893-e717d6359f90" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.019979 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" event={"ID":"35bc04e1-ab8e-4e8a-be06-c4f6bb828baa","Type":"ContainerStarted","Data":"c2b2cb93155ccd260a30777ef19aec71e72edc9afc8bfce712d5e3ba4731c585"} Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.020283 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.020294 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.051701 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.054517 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" podStartSLOduration=8.054505764 podStartE2EDuration="8.054505764s" podCreationTimestamp="2025-12-03 16:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:31:13.05437296 +0000 UTC m=+769.973709393" watchObservedRunningTime="2025-12-03 16:31:13.054505764 +0000 UTC m=+769.973842187" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.178526 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr"] Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.179053 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.179510 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.198654 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hrj6l"] Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.198750 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.199133 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.204123 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7wglg"] Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.204220 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.204559 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.216481 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf"] Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.216518 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8"] Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.216564 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.216810 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.217416 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:13 crc kubenswrapper[4768]: I1203 16:31:13.217427 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.230998 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(36485b326d5daf145f3423945ec198cf085c8293d0d50a7fe87768c9d3e9e3f4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.231066 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(36485b326d5daf145f3423945ec198cf085c8293d0d50a7fe87768c9d3e9e3f4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.231086 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(36485b326d5daf145f3423945ec198cf085c8293d0d50a7fe87768c9d3e9e3f4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.231131 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators(48a3a666-c857-4b4f-858c-43bc2f9d6f08)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators(48a3a666-c857-4b4f-858c-43bc2f9d6f08)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-cgfsr_openshift-operators_48a3a666-c857-4b4f-858c-43bc2f9d6f08_0(36485b326d5daf145f3423945ec198cf085c8293d0d50a7fe87768c9d3e9e3f4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" podUID="48a3a666-c857-4b4f-858c-43bc2f9d6f08" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.259193 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(424ce73d37bb27c8ddb18971e601b9d3fe326469b61c11eb4843bd4277416343): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.259261 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(424ce73d37bb27c8ddb18971e601b9d3fe326469b61c11eb4843bd4277416343): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.259292 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(424ce73d37bb27c8ddb18971e601b9d3fe326469b61c11eb4843bd4277416343): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.259343 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-hrj6l_openshift-operators(cebba08d-4a33-458a-9893-e717d6359f90)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-hrj6l_openshift-operators(cebba08d-4a33-458a-9893-e717d6359f90)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-hrj6l_openshift-operators_cebba08d-4a33-458a-9893-e717d6359f90_0(424ce73d37bb27c8ddb18971e601b9d3fe326469b61c11eb4843bd4277416343): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" podUID="cebba08d-4a33-458a-9893-e717d6359f90" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.272044 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(85aac3051ae00703fef18ac681993fe96a8e58ea7bb1a0e33966f6fe3ea70de9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.272114 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(85aac3051ae00703fef18ac681993fe96a8e58ea7bb1a0e33966f6fe3ea70de9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.272139 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(85aac3051ae00703fef18ac681993fe96a8e58ea7bb1a0e33966f6fe3ea70de9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.272191 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators(286fed40-67a8-4eab-9ca8-3c7609503df1)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators(286fed40-67a8-4eab-9ca8-3c7609503df1)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_openshift-operators_286fed40-67a8-4eab-9ca8-3c7609503df1_0(85aac3051ae00703fef18ac681993fe96a8e58ea7bb1a0e33966f6fe3ea70de9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" podUID="286fed40-67a8-4eab-9ca8-3c7609503df1" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.280873 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a0fdb498bf84701cdad8b6dddb5bae748568ba3e4a2b6e4c8536a3b874dde82e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
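
Every RunPodSandbox failure in this stretch reduces to the same condition: /etc/kubernetes/cni/net.d/ contains no CNI network configuration yet, because the network provider (OVN-Kubernetes; its ovnkube-node-vgcfq pod only reports ready at 16:31:13 above) is still coming up. A minimal Go sketch of the kind of check behind the message — not CRI-O's actual implementation, and the accepted file extensions are assumptions based on common CNI conventions:

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// hasCNIConf reports whether dir contains any CNI network configuration
// files. The runtime keeps refusing to create pod sandboxes while no such
// file exists, which is what produces the RunPodSandbox errors logged above.
func hasCNIConf(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		// Extensions are an assumption based on common CNI conventions.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConf("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if !ok {
		fmt.Println("no CNI configuration file; has your network provider started?")
		return
	}
	fmt.Println("CNI configuration present")
}

Once the provider writes its configuration into that directory, the kubelet's periodic retries start to succeed: the first sandboxes for these pods are created from 16:31:24 onward, as the ContainerStarted events further down show.
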
Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.280935 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a0fdb498bf84701cdad8b6dddb5bae748568ba3e4a2b6e4c8536a3b874dde82e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.280955 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a0fdb498bf84701cdad8b6dddb5bae748568ba3e4a2b6e4c8536a3b874dde82e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.281003 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators(cff0ca88-1474-46c7-b046-cec35a7d2409)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators(cff0ca88-1474-46c7-b046-cec35a7d2409)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_openshift-operators_cff0ca88-1474-46c7-b046-cec35a7d2409_0(a0fdb498bf84701cdad8b6dddb5bae748568ba3e4a2b6e4c8536a3b874dde82e): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" podUID="cff0ca88-1474-46c7-b046-cec35a7d2409" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.305120 4768 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(7c6989df2a6a633371ae56699710c437f853ef53781990f8ef899fd058cf88c6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.305180 4768 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(7c6989df2a6a633371ae56699710c437f853ef53781990f8ef899fd058cf88c6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.305205 4768 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(7c6989df2a6a633371ae56699710c437f853ef53781990f8ef899fd058cf88c6): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:13 crc kubenswrapper[4768]: E1203 16:31:13.305239 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-7wglg_openshift-operators(37340e28-1544-4f32-aed0-4c1d277cbf95)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-7wglg_openshift-operators(37340e28-1544-4f32-aed0-4c1d277cbf95)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-7wglg_openshift-operators_37340e28-1544-4f32-aed0-4c1d277cbf95_0(7c6989df2a6a633371ae56699710c437f853ef53781990f8ef899fd058cf88c6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" podUID="37340e28-1544-4f32-aed0-4c1d277cbf95" Dec 03 16:31:14 crc kubenswrapper[4768]: I1203 16:31:14.026364 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:14 crc kubenswrapper[4768]: I1203 16:31:14.098509 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:16 crc kubenswrapper[4768]: I1203 16:31:16.067121 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgcfq" Dec 03 16:31:24 crc kubenswrapper[4768]: I1203 16:31:24.530794 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:24 crc kubenswrapper[4768]: I1203 16:31:24.533132 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" Dec 03 16:31:24 crc kubenswrapper[4768]: I1203 16:31:24.752051 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr"] Dec 03 16:31:24 crc kubenswrapper[4768]: W1203 16:31:24.760441 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48a3a666_c857_4b4f_858c_43bc2f9d6f08.slice/crio-67d2aea3481c8e5dca75d1d0e42f9a6ecf60a668d790c08f2ad11224df88dca1 WatchSource:0}: Error finding container 67d2aea3481c8e5dca75d1d0e42f9a6ecf60a668d790c08f2ad11224df88dca1: Status 404 returned error can't find the container with id 67d2aea3481c8e5dca75d1d0e42f9a6ecf60a668d790c08f2ad11224df88dca1 Dec 03 16:31:25 crc kubenswrapper[4768]: I1203 16:31:25.080629 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" event={"ID":"48a3a666-c857-4b4f-858c-43bc2f9d6f08","Type":"ContainerStarted","Data":"67d2aea3481c8e5dca75d1d0e42f9a6ecf60a668d790c08f2ad11224df88dca1"} Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.028275 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.028343 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.028395 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.029061 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.029117 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32" gracePeriod=600 Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.530816 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.531968 4768 util.go:30] "No sandbox for pod can be found. 
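
The machine-config-daemon entries just above show a complete liveness-probe restart cycle: the HTTP GET to http://127.0.0.1:8798/health is refused, the prober reports the container unhealthy, and the kubelet kills container 82a30a2a... with a 600-second grace period and starts a replacement (the ContainerDied/ContainerStarted pair at 16:31:27 below). A self-contained sketch of one such HTTP probe, under the assumption that a 200-399 status counts as success (the range the kubelet's HTTP prober accepts); the one-second timeout is illustrative:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP GET against a health endpoint, in the same
// spirit as the kubelet's HTTP liveness prober. A transport error (such as
// the "connection refused" in the log above) or a status outside 200-399
// counts as a failure.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy: HTTP status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	// Endpoint taken from the log entries above; the timeout is illustrative.
	if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("Probe succeeded")
	}
}

Whether a single refused connection triggers a restart depends on the probe's failureThreshold; the kill at 16:31:26.029 indicates the threshold had just been reached.
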
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" Dec 03 16:31:26 crc kubenswrapper[4768]: I1203 16:31:26.772643 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8"] Dec 03 16:31:26 crc kubenswrapper[4768]: W1203 16:31:26.793811 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod286fed40_67a8_4eab_9ca8_3c7609503df1.slice/crio-02065866b8b49539b095dc0e9e2b1a7f1896e3dac10610e76c0e2d200c0b02a2 WatchSource:0}: Error finding container 02065866b8b49539b095dc0e9e2b1a7f1896e3dac10610e76c0e2d200c0b02a2: Status 404 returned error can't find the container with id 02065866b8b49539b095dc0e9e2b1a7f1896e3dac10610e76c0e2d200c0b02a2 Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.098001 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" event={"ID":"286fed40-67a8-4eab-9ca8-3c7609503df1","Type":"ContainerStarted","Data":"02065866b8b49539b095dc0e9e2b1a7f1896e3dac10610e76c0e2d200c0b02a2"} Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.101106 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32" exitCode=0 Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.101170 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32"} Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.101188 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346"} Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.101854 4768 scope.go:117] "RemoveContainer" containerID="0d7f75aea2c344a000180b894ed12cfb3b2c2c89d1c85177e008d90e2ddf0961" Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.530837 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.531364 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.531823 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.532203 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" Dec 03 16:31:27 crc kubenswrapper[4768]: I1203 16:31:27.882511 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf"] Dec 03 16:31:28 crc kubenswrapper[4768]: I1203 16:31:28.108900 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" event={"ID":"cff0ca88-1474-46c7-b046-cec35a7d2409","Type":"ContainerStarted","Data":"62b2e325c83f62b6dda5b0f7bac1920b89f0095b79a60c7d57f7e54d2d8c93ae"} Dec 03 16:31:28 crc kubenswrapper[4768]: I1203 16:31:28.117727 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-7wglg"] Dec 03 16:31:28 crc kubenswrapper[4768]: W1203 16:31:28.124417 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37340e28_1544_4f32_aed0_4c1d277cbf95.slice/crio-dc1348c9656d5fd989d741cd8a48d44ca766a0ec90423ec89db43a3b319574e3 WatchSource:0}: Error finding container dc1348c9656d5fd989d741cd8a48d44ca766a0ec90423ec89db43a3b319574e3: Status 404 returned error can't find the container with id dc1348c9656d5fd989d741cd8a48d44ca766a0ec90423ec89db43a3b319574e3 Dec 03 16:31:28 crc kubenswrapper[4768]: I1203 16:31:28.533853 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:28 crc kubenswrapper[4768]: I1203 16:31:28.534351 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" Dec 03 16:31:28 crc kubenswrapper[4768]: I1203 16:31:28.817488 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-hrj6l"] Dec 03 16:31:28 crc kubenswrapper[4768]: W1203 16:31:28.825031 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcebba08d_4a33_458a_9893_e717d6359f90.slice/crio-29559fe1e8b226f58d38d67d3c2c6ab57ba57054badb5de2605c1108fffc26e9 WatchSource:0}: Error finding container 29559fe1e8b226f58d38d67d3c2c6ab57ba57054badb5de2605c1108fffc26e9: Status 404 returned error can't find the container with id 29559fe1e8b226f58d38d67d3c2c6ab57ba57054badb5de2605c1108fffc26e9 Dec 03 16:31:29 crc kubenswrapper[4768]: I1203 16:31:29.115731 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" event={"ID":"cebba08d-4a33-458a-9893-e717d6359f90","Type":"ContainerStarted","Data":"29559fe1e8b226f58d38d67d3c2c6ab57ba57054badb5de2605c1108fffc26e9"} Dec 03 16:31:29 crc kubenswrapper[4768]: I1203 16:31:29.117971 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" event={"ID":"37340e28-1544-4f32-aed0-4c1d277cbf95","Type":"ContainerStarted","Data":"dc1348c9656d5fd989d741cd8a48d44ca766a0ec90423ec89db43a3b319574e3"} Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.214105 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" event={"ID":"48a3a666-c857-4b4f-858c-43bc2f9d6f08","Type":"ContainerStarted","Data":"ea86471cf733a569d8eba06dff5ff2e7070c928554354c5b57a907c51491a65d"} Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.219395 4768 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" event={"ID":"286fed40-67a8-4eab-9ca8-3c7609503df1","Type":"ContainerStarted","Data":"858119e5cd1ec0cc19ff3d37fcbef993312701b24c3c1ce825221059d87ebb91"}
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.221109 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" event={"ID":"cff0ca88-1474-46c7-b046-cec35a7d2409","Type":"ContainerStarted","Data":"5488f533d2040faf9635e10045eba27642db1f74721c32bc015a5a8679cafc36"}
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.223901 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" event={"ID":"cebba08d-4a33-458a-9893-e717d6359f90","Type":"ContainerStarted","Data":"4e47be207c3bc9f695b8f04ec6a8567f403667c4a9c02d89ae55afcc69cfbf44"}
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.224508 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-hrj6l"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.226195 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" event={"ID":"37340e28-1544-4f32-aed0-4c1d277cbf95","Type":"ContainerStarted","Data":"bee2a921c2c144ef5cd51949686d6d035369991cd81673f98dba5059de4351c8"}
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.226494 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.227700 4768 patch_prober.go:28] interesting pod/observability-operator-d8bb48f5d-7wglg container/operator namespace/openshift-operators: Readiness probe status=failure output="Get \"http://10.217.0.22:8081/healthz\": dial tcp 10.217.0.22:8081: connect: connection refused" start-of-body=
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.227766 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" podUID="37340e28-1544-4f32-aed0-4c1d277cbf95" containerName="operator" probeResult="failure" output="Get \"http://10.217.0.22:8081/healthz\": dial tcp 10.217.0.22:8081: connect: connection refused"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.248977 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-cgfsr" podStartSLOduration=15.318969771999999 podStartE2EDuration="31.248940921s" podCreationTimestamp="2025-12-03 16:31:10 +0000 UTC" firstStartedPulling="2025-12-03 16:31:24.763429364 +0000 UTC m=+781.682765797" lastFinishedPulling="2025-12-03 16:31:40.693400523 +0000 UTC m=+797.612736946" observedRunningTime="2025-12-03 16:31:41.246511675 +0000 UTC m=+798.165848138" watchObservedRunningTime="2025-12-03 16:31:41.248940921 +0000 UTC m=+798.168277414"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.272581 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf" podStartSLOduration=18.485210486 podStartE2EDuration="31.272553996s" podCreationTimestamp="2025-12-03 16:31:10 +0000 UTC" firstStartedPulling="2025-12-03 16:31:27.906107534 +0000 UTC m=+784.825443957" lastFinishedPulling="2025-12-03 16:31:40.693451054 +0000 UTC m=+797.612787467" observedRunningTime="2025-12-03 16:31:41.266359347 +0000 UTC m=+798.185695810" watchObservedRunningTime="2025-12-03 16:31:41.272553996 +0000 UTC m=+798.191890439"
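
The pod_startup_latency_tracker entries here are internally consistent: podStartE2EDuration equals watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration appears to be that E2E duration minus the image-pull window (lastFinishedPulling minus firstStartedPulling on the monotonic m=+... clock). A quick check in Go against the ...flrwf entry directly above, reading the logged numbers rather than kubelet source:

package main

import "fmt"

func main() {
	// Values copied from the ...admission-webhook-7bff46bcd6-flrwf entry above.
	e2e := 31.272553996        // podStartE2EDuration, seconds
	firstPull := 784.825443957 // firstStartedPulling, monotonic m=+ offset
	lastPull := 797.612787467  // lastFinishedPulling, monotonic m=+ offset

	slo := e2e - (lastPull - firstPull)
	// Prints 18.485210486 (up to float64 rounding), matching the logged
	// podStartSLOduration for this pod.
	fmt.Printf("podStartSLOduration = %.9fs\n", slo)
}

The same relation holds for the other four operator pods in this batch, all of whose image pulls finish within about 30 ms of each other at 16:31:40.
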
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.306094 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg" podStartSLOduration=18.712714022 podStartE2EDuration="31.306075932s" podCreationTimestamp="2025-12-03 16:31:10 +0000 UTC" firstStartedPulling="2025-12-03 16:31:28.126837455 +0000 UTC m=+785.046173878" lastFinishedPulling="2025-12-03 16:31:40.720199365 +0000 UTC m=+797.639535788" observedRunningTime="2025-12-03 16:31:41.301666592 +0000 UTC m=+798.221003015" watchObservedRunningTime="2025-12-03 16:31:41.306075932 +0000 UTC m=+798.225412365"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.324643 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8" podStartSLOduration=17.407615505 podStartE2EDuration="31.324625289s" podCreationTimestamp="2025-12-03 16:31:10 +0000 UTC" firstStartedPulling="2025-12-03 16:31:26.798204895 +0000 UTC m=+783.717541318" lastFinishedPulling="2025-12-03 16:31:40.715214679 +0000 UTC m=+797.634551102" observedRunningTime="2025-12-03 16:31:41.322873901 +0000 UTC m=+798.242210344" watchObservedRunningTime="2025-12-03 16:31:41.324625289 +0000 UTC m=+798.243961712"
Dec 03 16:31:41 crc kubenswrapper[4768]: I1203 16:31:41.344087 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-hrj6l" podStartSLOduration=19.457766566 podStartE2EDuration="31.34406894s" podCreationTimestamp="2025-12-03 16:31:10 +0000 UTC" firstStartedPulling="2025-12-03 16:31:28.829208313 +0000 UTC m=+785.748544736" lastFinishedPulling="2025-12-03 16:31:40.715510687 +0000 UTC m=+797.634847110" observedRunningTime="2025-12-03 16:31:41.340939545 +0000 UTC m=+798.260275968" watchObservedRunningTime="2025-12-03 16:31:41.34406894 +0000 UTC m=+798.263405363"
Dec 03 16:31:42 crc kubenswrapper[4768]: I1203 16:31:42.238269 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-7wglg"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.286528 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-hrj6l"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.596283 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"]
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.596941 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.602538 4768 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-f58mx" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.602845 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.603078 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.612650 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-btgph"] Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.613398 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-btgph" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.615308 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"] Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.615544 4768 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-ddv64" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.620649 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4l62p"] Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.622349 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.625677 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-btgph"] Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.627174 4768 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-rqqqt" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.631783 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4l62p"] Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.643691 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpd9g\" (UniqueName: \"kubernetes.io/projected/540cd9b8-af71-4d61-bdab-50850c4eec6d-kube-api-access-qpd9g\") pod \"cert-manager-cainjector-7f985d654d-k2zl7\" (UID: \"540cd9b8-af71-4d61-bdab-50850c4eec6d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.643735 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd7b5\" (UniqueName: \"kubernetes.io/projected/27f311f3-af6d-45b4-8e9f-b4437d56350c-kube-api-access-zd7b5\") pod \"cert-manager-5b446d88c5-btgph\" (UID: \"27f311f3-af6d-45b4-8e9f-b4437d56350c\") " pod="cert-manager/cert-manager-5b446d88c5-btgph" Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.643782 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csthc\" (UniqueName: \"kubernetes.io/projected/ae9d8259-9e85-404b-8a1e-909147ffb4a7-kube-api-access-csthc\") pod \"cert-manager-webhook-5655c58dd6-4l62p\" (UID: \"ae9d8259-9e85-404b-8a1e-909147ffb4a7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p" Dec 03 16:31:51 
crc kubenswrapper[4768]: I1203 16:31:51.744460 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpd9g\" (UniqueName: \"kubernetes.io/projected/540cd9b8-af71-4d61-bdab-50850c4eec6d-kube-api-access-qpd9g\") pod \"cert-manager-cainjector-7f985d654d-k2zl7\" (UID: \"540cd9b8-af71-4d61-bdab-50850c4eec6d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.744508 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd7b5\" (UniqueName: \"kubernetes.io/projected/27f311f3-af6d-45b4-8e9f-b4437d56350c-kube-api-access-zd7b5\") pod \"cert-manager-5b446d88c5-btgph\" (UID: \"27f311f3-af6d-45b4-8e9f-b4437d56350c\") " pod="cert-manager/cert-manager-5b446d88c5-btgph"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.744549 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csthc\" (UniqueName: \"kubernetes.io/projected/ae9d8259-9e85-404b-8a1e-909147ffb4a7-kube-api-access-csthc\") pod \"cert-manager-webhook-5655c58dd6-4l62p\" (UID: \"ae9d8259-9e85-404b-8a1e-909147ffb4a7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.768160 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpd9g\" (UniqueName: \"kubernetes.io/projected/540cd9b8-af71-4d61-bdab-50850c4eec6d-kube-api-access-qpd9g\") pod \"cert-manager-cainjector-7f985d654d-k2zl7\" (UID: \"540cd9b8-af71-4d61-bdab-50850c4eec6d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.768336 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csthc\" (UniqueName: \"kubernetes.io/projected/ae9d8259-9e85-404b-8a1e-909147ffb4a7-kube-api-access-csthc\") pod \"cert-manager-webhook-5655c58dd6-4l62p\" (UID: \"ae9d8259-9e85-404b-8a1e-909147ffb4a7\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.780282 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd7b5\" (UniqueName: \"kubernetes.io/projected/27f311f3-af6d-45b4-8e9f-b4437d56350c-kube-api-access-zd7b5\") pod \"cert-manager-5b446d88c5-btgph\" (UID: \"27f311f3-af6d-45b4-8e9f-b4437d56350c\") " pod="cert-manager/cert-manager-5b446d88c5-btgph"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.928557 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.938427 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-btgph"
Dec 03 16:31:51 crc kubenswrapper[4768]: I1203 16:31:51.950186 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p"
Dec 03 16:31:52 crc kubenswrapper[4768]: I1203 16:31:52.261519 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-btgph"]
Dec 03 16:31:52 crc kubenswrapper[4768]: I1203 16:31:52.282212 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-btgph" event={"ID":"27f311f3-af6d-45b4-8e9f-b4437d56350c","Type":"ContainerStarted","Data":"3dd6aa92ef08255f51e60181c9ce283a4a99692204be654bf835a6a1eefd41fa"}
Dec 03 16:31:52 crc kubenswrapper[4768]: I1203 16:31:52.488956 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-k2zl7"]
Dec 03 16:31:52 crc kubenswrapper[4768]: W1203 16:31:52.489243 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae9d8259_9e85_404b_8a1e_909147ffb4a7.slice/crio-b74602509e0c0df295881326b781b8cb98141f46a15f704b552aca64b27f04a2 WatchSource:0}: Error finding container b74602509e0c0df295881326b781b8cb98141f46a15f704b552aca64b27f04a2: Status 404 returned error can't find the container with id b74602509e0c0df295881326b781b8cb98141f46a15f704b552aca64b27f04a2
Dec 03 16:31:52 crc kubenswrapper[4768]: I1203 16:31:52.492894 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-4l62p"]
Dec 03 16:31:52 crc kubenswrapper[4768]: W1203 16:31:52.497311 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod540cd9b8_af71_4d61_bdab_50850c4eec6d.slice/crio-15d14def01aa151d00919ed53da284b994e0f5f2405705be11ae906233a50668 WatchSource:0}: Error finding container 15d14def01aa151d00919ed53da284b994e0f5f2405705be11ae906233a50668: Status 404 returned error can't find the container with id 15d14def01aa151d00919ed53da284b994e0f5f2405705be11ae906233a50668
Dec 03 16:31:53 crc kubenswrapper[4768]: I1203 16:31:53.296284 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7" event={"ID":"540cd9b8-af71-4d61-bdab-50850c4eec6d","Type":"ContainerStarted","Data":"15d14def01aa151d00919ed53da284b994e0f5f2405705be11ae906233a50668"}
Dec 03 16:31:53 crc kubenswrapper[4768]: I1203 16:31:53.297691 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p" event={"ID":"ae9d8259-9e85-404b-8a1e-909147ffb4a7","Type":"ContainerStarted","Data":"b74602509e0c0df295881326b781b8cb98141f46a15f704b552aca64b27f04a2"}
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.324231 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-btgph" event={"ID":"27f311f3-af6d-45b4-8e9f-b4437d56350c","Type":"ContainerStarted","Data":"1fda2fbdb85144922772ff4315128a9eaa8306a5c5b81e7ca2b6c803591087a7"}
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.327751 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7" event={"ID":"540cd9b8-af71-4d61-bdab-50850c4eec6d","Type":"ContainerStarted","Data":"12feffca1148ff2538924c463490df4bc6aac203b152510d15585ef509bf43ad"}
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.328952 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p" event={"ID":"ae9d8259-9e85-404b-8a1e-909147ffb4a7","Type":"ContainerStarted","Data":"5bb590d12f6c68e4386b171f4691bfaaef7ea61366e4893e4f6a0dbdd3b32a96"}
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.329102 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p"
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.346240 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-btgph" podStartSLOduration=3.02301547 podStartE2EDuration="6.346220873s" podCreationTimestamp="2025-12-03 16:31:51 +0000 UTC" firstStartedPulling="2025-12-03 16:31:52.266573873 +0000 UTC m=+809.185910296" lastFinishedPulling="2025-12-03 16:31:55.589779276 +0000 UTC m=+812.509115699" observedRunningTime="2025-12-03 16:31:57.343076537 +0000 UTC m=+814.262412960" watchObservedRunningTime="2025-12-03 16:31:57.346220873 +0000 UTC m=+814.265557296"
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.387979 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-k2zl7" podStartSLOduration=2.107080136 podStartE2EDuration="6.387952663s" podCreationTimestamp="2025-12-03 16:31:51 +0000 UTC" firstStartedPulling="2025-12-03 16:31:52.499484827 +0000 UTC m=+809.418821250" lastFinishedPulling="2025-12-03 16:31:56.780357354 +0000 UTC m=+813.699693777" observedRunningTime="2025-12-03 16:31:57.3827315 +0000 UTC m=+814.302067913" watchObservedRunningTime="2025-12-03 16:31:57.387952663 +0000 UTC m=+814.307289106"
Dec 03 16:31:57 crc kubenswrapper[4768]: I1203 16:31:57.433467 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p" podStartSLOduration=2.156881246 podStartE2EDuration="6.433449306s" podCreationTimestamp="2025-12-03 16:31:51 +0000 UTC" firstStartedPulling="2025-12-03 16:31:52.491525099 +0000 UTC m=+809.410861522" lastFinishedPulling="2025-12-03 16:31:56.768093159 +0000 UTC m=+813.687429582" observedRunningTime="2025-12-03 16:31:57.430777653 +0000 UTC m=+814.350114076" watchObservedRunningTime="2025-12-03 16:31:57.433449306 +0000 UTC m=+814.352785729"
Dec 03 16:32:01 crc kubenswrapper[4768]: I1203 16:32:01.960234 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-4l62p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.620758 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"]
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.623809 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.626151 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.636814 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"]
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.744527 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.744773 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d95tl\" (UniqueName: \"kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.744817 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.846972 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d95tl\" (UniqueName: \"kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.847071 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.847191 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.848274 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.848417 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.875635 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d95tl\" (UniqueName: \"kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl\") pod \"142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") " pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:27 crc kubenswrapper[4768]: I1203 16:32:27.941812 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:28 crc kubenswrapper[4768]: I1203 16:32:28.446309 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"]
Dec 03 16:32:28 crc kubenswrapper[4768]: W1203 16:32:28.468805 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4a915754_d391_4329_91eb_40f99fcebdad.slice/crio-3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b WatchSource:0}: Error finding container 3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b: Status 404 returned error can't find the container with id 3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.381393 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p" event={"ID":"4a915754-d391-4329-91eb-40f99fcebdad","Type":"ContainerStarted","Data":"3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b"}
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.726777 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.729453 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.751151 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.790722 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx47t\" (UniqueName: \"kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.790849 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.790892 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.892148 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx47t\" (UniqueName: \"kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.892255 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.892302 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.892966 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.893122 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:29 crc kubenswrapper[4768]: I1203 16:32:29.927211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx47t\" (UniqueName: \"kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t\") pod \"redhat-operators-sbv55\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") " pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.052498 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.312273 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.400400 4768 generic.go:334] "Generic (PLEG): container finished" podID="4a915754-d391-4329-91eb-40f99fcebdad" containerID="0f700a3e14954be51eb1d838469773722c1742da4c151a6ec7b7d1308aa955d7" exitCode=0
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.400504 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p" event={"ID":"4a915754-d391-4329-91eb-40f99fcebdad","Type":"ContainerDied","Data":"0f700a3e14954be51eb1d838469773722c1742da4c151a6ec7b7d1308aa955d7"}
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.404721 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerStarted","Data":"5d30f6233e065d981158b5c09dd3b9157d318b1d2c7f4d7e15291ad0eb62de3f"}
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.450674 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"]
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.451369 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.453857 4768 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-p6s9s"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.454603 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.459178 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.462935 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.603363 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw47k\" (UniqueName: \"kubernetes.io/projected/d6d418f2-9cf3-41a9-9fe9-5f2872908d9f-kube-api-access-pw47k\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.603483 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.705140 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.705242 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw47k\" (UniqueName: \"kubernetes.io/projected/d6d418f2-9cf3-41a9-9fe9-5f2872908d9f-kube-api-access-pw47k\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.709263 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.709306 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6577ca2fd103c8fa7336d7a525c35acd0c2ac8b131bffa14970682fc13fa89b2/globalmount\"" pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.725273 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw47k\" (UniqueName: \"kubernetes.io/projected/d6d418f2-9cf3-41a9-9fe9-5f2872908d9f-kube-api-access-pw47k\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.744230 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-edc90adf-4754-423e-b165-6f6ab0175b3b\") pod \"minio\" (UID: \"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f\") " pod="minio-dev/minio"
Dec 03 16:32:30 crc kubenswrapper[4768]: I1203 16:32:30.763846 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Dec 03 16:32:31 crc kubenswrapper[4768]: I1203 16:32:31.190709 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Dec 03 16:32:31 crc kubenswrapper[4768]: W1203 16:32:31.198520 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6d418f2_9cf3_41a9_9fe9_5f2872908d9f.slice/crio-d531c3cee4d2ed9e326c490ce02976876fd45ba1d04c9c5a61f1e3fe587df346 WatchSource:0}: Error finding container d531c3cee4d2ed9e326c490ce02976876fd45ba1d04c9c5a61f1e3fe587df346: Status 404 returned error can't find the container with id d531c3cee4d2ed9e326c490ce02976876fd45ba1d04c9c5a61f1e3fe587df346
Dec 03 16:32:31 crc kubenswrapper[4768]: I1203 16:32:31.414403 4768 generic.go:334] "Generic (PLEG): container finished" podID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerID="f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a" exitCode=0
Dec 03 16:32:31 crc kubenswrapper[4768]: I1203 16:32:31.414464 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerDied","Data":"f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a"}
Dec 03 16:32:31 crc kubenswrapper[4768]: I1203 16:32:31.416921 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f","Type":"ContainerStarted","Data":"d531c3cee4d2ed9e326c490ce02976876fd45ba1d04c9c5a61f1e3fe587df346"}
Dec 03 16:32:32 crc kubenswrapper[4768]: I1203 16:32:32.428635 4768 generic.go:334] "Generic (PLEG): container finished" podID="4a915754-d391-4329-91eb-40f99fcebdad" containerID="3975595f9471f1b1f87207929a45516946a5c45a56caa36bf710649fc741de79" exitCode=0
Dec 03 16:32:32 crc kubenswrapper[4768]: I1203 16:32:32.428717 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p" event={"ID":"4a915754-d391-4329-91eb-40f99fcebdad","Type":"ContainerDied","Data":"3975595f9471f1b1f87207929a45516946a5c45a56caa36bf710649fc741de79"}
Dec 03 16:32:33 crc kubenswrapper[4768]: I1203 16:32:33.443821 4768 generic.go:334] "Generic (PLEG): container finished" podID="4a915754-d391-4329-91eb-40f99fcebdad" containerID="c7d64eb14f56de96891ec2b22bb8b489184e86e3617245c455c8c663f639b54c" exitCode=0
Dec 03 16:32:33 crc kubenswrapper[4768]: I1203 16:32:33.443892 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p" event={"ID":"4a915754-d391-4329-91eb-40f99fcebdad","Type":"ContainerDied","Data":"c7d64eb14f56de96891ec2b22bb8b489184e86e3617245c455c8c663f639b54c"}
Dec 03 16:32:33 crc kubenswrapper[4768]: I1203 16:32:33.448033 4768 generic.go:334] "Generic (PLEG): container finished" podID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerID="eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08" exitCode=0
Dec 03 16:32:33 crc kubenswrapper[4768]: I1203 16:32:33.448077 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerDied","Data":"eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08"}
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.106114 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.266170 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle\") pod \"4a915754-d391-4329-91eb-40f99fcebdad\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") "
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.266810 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util\") pod \"4a915754-d391-4329-91eb-40f99fcebdad\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") "
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.266903 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d95tl\" (UniqueName: \"kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl\") pod \"4a915754-d391-4329-91eb-40f99fcebdad\" (UID: \"4a915754-d391-4329-91eb-40f99fcebdad\") "
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.267881 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle" (OuterVolumeSpecName: "bundle") pod "4a915754-d391-4329-91eb-40f99fcebdad" (UID: "4a915754-d391-4329-91eb-40f99fcebdad"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.272528 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl" (OuterVolumeSpecName: "kube-api-access-d95tl") pod "4a915754-d391-4329-91eb-40f99fcebdad" (UID: "4a915754-d391-4329-91eb-40f99fcebdad"). InnerVolumeSpecName "kube-api-access-d95tl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.287775 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util" (OuterVolumeSpecName: "util") pod "4a915754-d391-4329-91eb-40f99fcebdad" (UID: "4a915754-d391-4329-91eb-40f99fcebdad"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.368275 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.368325 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4a915754-d391-4329-91eb-40f99fcebdad-util\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.368344 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d95tl\" (UniqueName: \"kubernetes.io/projected/4a915754-d391-4329-91eb-40f99fcebdad-kube-api-access-d95tl\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.465304 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p" event={"ID":"4a915754-d391-4329-91eb-40f99fcebdad","Type":"ContainerDied","Data":"3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b"}
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.465357 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3185b25562eabd2446c9cb2e1137d3ca37fc1386155f27d9c2c6fd53cee0b73b"
Dec 03 16:32:35 crc kubenswrapper[4768]: I1203 16:32:35.465406 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p"
Dec 03 16:32:36 crc kubenswrapper[4768]: I1203 16:32:36.473073 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"d6d418f2-9cf3-41a9-9fe9-5f2872908d9f","Type":"ContainerStarted","Data":"5ebed60b2401329296bfdf7d9f74760bc1a7a50f5230771f493be762ac76b4f8"}
Dec 03 16:32:36 crc kubenswrapper[4768]: I1203 16:32:36.477181 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerStarted","Data":"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"}
Dec 03 16:32:36 crc kubenswrapper[4768]: I1203 16:32:36.493870 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.98751222 podStartE2EDuration="9.493845634s" podCreationTimestamp="2025-12-03 16:32:27 +0000 UTC" firstStartedPulling="2025-12-03 16:32:31.200766118 +0000 UTC m=+848.120102541" lastFinishedPulling="2025-12-03 16:32:35.707099492 +0000 UTC m=+852.626435955" observedRunningTime="2025-12-03 16:32:36.487348852 +0000 UTC m=+853.406685315" watchObservedRunningTime="2025-12-03 16:32:36.493845634 +0000 UTC m=+853.413182087"
Dec 03 16:32:36 crc kubenswrapper[4768]: I1203 16:32:36.522616 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sbv55" podStartSLOduration=3.255315374 podStartE2EDuration="7.522581162s" podCreationTimestamp="2025-12-03 16:32:29 +0000 UTC" firstStartedPulling="2025-12-03 16:32:31.417826402 +0000 UTC m=+848.337162845" lastFinishedPulling="2025-12-03 16:32:35.68509221 +0000 UTC m=+852.604428633" observedRunningTime="2025-12-03 16:32:36.520277431 +0000 UTC m=+853.439613904" watchObservedRunningTime="2025-12-03 16:32:36.522581162 +0000 UTC m=+853.441917605"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.053265 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.053626 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.804875 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"]
Dec 03 16:32:40 crc kubenswrapper[4768]: E1203 16:32:40.805152 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="pull"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.805169 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="pull"
Dec 03 16:32:40 crc kubenswrapper[4768]: E1203 16:32:40.805189 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="extract"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.805197 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="extract"
Dec 03 16:32:40 crc kubenswrapper[4768]: E1203 16:32:40.805208 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="util"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.805218 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="util"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.805357 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a915754-d391-4329-91eb-40f99fcebdad" containerName="extract"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.806271 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.809137 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.815141 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"]
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.943522 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.943615 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzggm\" (UniqueName: \"kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:40 crc kubenswrapper[4768]: I1203 16:32:40.943636 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.045293 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.045382 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzggm\" (UniqueName: \"kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.045414 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.045869 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.046006 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.082399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzggm\" (UniqueName: \"kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm\") pod \"03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") " pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.111922 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sbv55" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="registry-server" probeResult="failure" output=<
Dec 03 16:32:41 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s
Dec 03 16:32:41 crc kubenswrapper[4768]: >
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.122619 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.336882 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"]
Dec 03 16:32:41 crc kubenswrapper[4768]: I1203 16:32:41.511854 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb" event={"ID":"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3","Type":"ContainerStarted","Data":"315bfedbe63343e995ce197813106b10d9e146c93b35bef157990ebec4fe1df4"}
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.830664 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"]
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.832074 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.857518 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.859842 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.859869 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.859906 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.859905 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.859849 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-nwq8b"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891242 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-apiservice-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891623 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-webhook-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891652 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891680 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/08a42ef1-90ae-4368-870e-e9bda0d806b0-manager-config\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891719 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwkqn\" (UniqueName: \"kubernetes.io/projected/08a42ef1-90ae-4368-870e-e9bda0d806b0-kube-api-access-xwkqn\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.891406 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"]
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.992857 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-apiservice-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.992935 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-webhook-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.992960 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.992984 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/08a42ef1-90ae-4368-870e-e9bda0d806b0-manager-config\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.993044 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwkqn\" (UniqueName: \"kubernetes.io/projected/08a42ef1-90ae-4368-870e-e9bda0d806b0-kube-api-access-xwkqn\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:42 crc kubenswrapper[4768]: I1203 16:32:42.994607 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/08a42ef1-90ae-4368-870e-e9bda0d806b0-manager-config\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.002645 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.002886 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-webhook-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.012204 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/08a42ef1-90ae-4368-870e-e9bda0d806b0-apiservice-cert\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.033838 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwkqn\" (UniqueName: \"kubernetes.io/projected/08a42ef1-90ae-4368-870e-e9bda0d806b0-kube-api-access-xwkqn\") pod \"loki-operator-controller-manager-6f7789658f-x4dsz\" (UID: \"08a42ef1-90ae-4368-870e-e9bda0d806b0\") " pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.147275 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.343075 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"]
Dec 03 16:32:43 crc kubenswrapper[4768]: W1203 16:32:43.350697 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod08a42ef1_90ae_4368_870e_e9bda0d806b0.slice/crio-dde4b90f3da234354f0260f1df649649f1b8066ac58dcacbde0198ef2fdd636b WatchSource:0}: Error finding container dde4b90f3da234354f0260f1df649649f1b8066ac58dcacbde0198ef2fdd636b: Status 404 returned error can't find the container with id dde4b90f3da234354f0260f1df649649f1b8066ac58dcacbde0198ef2fdd636b
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.525056 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz" event={"ID":"08a42ef1-90ae-4368-870e-e9bda0d806b0","Type":"ContainerStarted","Data":"dde4b90f3da234354f0260f1df649649f1b8066ac58dcacbde0198ef2fdd636b"}
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.526590 4768 generic.go:334] "Generic (PLEG): container finished" podID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerID="98d13671e429c80fdcfafa93fbb9fd880854860cbfc4df6cf937db92b32453a5" exitCode=0
Dec 03 16:32:43 crc kubenswrapper[4768]: I1203 16:32:43.526635 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb" event={"ID":"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3","Type":"ContainerDied","Data":"98d13671e429c80fdcfafa93fbb9fd880854860cbfc4df6cf937db92b32453a5"}
Dec 03 16:32:46 crc kubenswrapper[4768]: I1203 16:32:46.546926 4768 generic.go:334] "Generic (PLEG): container finished" podID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerID="0002a9d3756e66e0e964405ee7be05ca90a5a250bc5656b177143ac9cd26b8c1" exitCode=0
Dec 03 16:32:46 crc kubenswrapper[4768]: I1203 16:32:46.547205 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb" event={"ID":"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3","Type":"ContainerDied","Data":"0002a9d3756e66e0e964405ee7be05ca90a5a250bc5656b177143ac9cd26b8c1"}
Dec 03 16:32:47 crc kubenswrapper[4768]: I1203 16:32:47.555292 4768 generic.go:334] "Generic (PLEG): container finished" podID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerID="85223a1f558e258771dc3d4b1171170c7c903313e26dc5c6d58c5c3a5580e03d" exitCode=0
Dec 03 16:32:47 crc kubenswrapper[4768]: I1203 16:32:47.555332 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb" event={"ID":"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3","Type":"ContainerDied","Data":"85223a1f558e258771dc3d4b1171170c7c903313e26dc5c6d58c5c3a5580e03d"}
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.100462 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.149432 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.191116 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.291452 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util\") pod \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") "
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.291523 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle\") pod \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") "
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.291561 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzggm\" (UniqueName: \"kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm\") pod \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\" (UID: \"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3\") "
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.292642 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle" (OuterVolumeSpecName: "bundle") pod "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" (UID: "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.295112 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm" (OuterVolumeSpecName: "kube-api-access-qzggm") pod "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" (UID: "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3"). InnerVolumeSpecName "kube-api-access-qzggm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.305446 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util" (OuterVolumeSpecName: "util") pod "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" (UID: "45f2ba97-fe29-4c2e-949f-3e7d0243d7e3"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.338156 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.393455 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-util\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.393490 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.393503 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzggm\" (UniqueName: \"kubernetes.io/projected/45f2ba97-fe29-4c2e-949f-3e7d0243d7e3-kube-api-access-qzggm\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.576025 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb" event={"ID":"45f2ba97-fe29-4c2e-949f-3e7d0243d7e3","Type":"ContainerDied","Data":"315bfedbe63343e995ce197813106b10d9e146c93b35bef157990ebec4fe1df4"}
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.576047 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb"
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.576076 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="315bfedbe63343e995ce197813106b10d9e146c93b35bef157990ebec4fe1df4"
Dec 03 16:32:50 crc kubenswrapper[4768]: I1203 16:32:50.577967 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz" event={"ID":"08a42ef1-90ae-4368-870e-e9bda0d806b0","Type":"ContainerStarted","Data":"f05d588ae21a6e77559f3d24c5a5e0519e6d6874a066b3c1484d64bbb0c557a9"}
Dec 03 16:32:51 crc kubenswrapper[4768]: I1203 16:32:51.585474 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-sbv55" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="registry-server" containerID="cri-o://8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc" gracePeriod=2
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.038180 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.227006 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content\") pod \"74739a17-5a6c-4c53-a927-47003c5d03d8\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") "
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.227054 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx47t\" (UniqueName: \"kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t\") pod \"74739a17-5a6c-4c53-a927-47003c5d03d8\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") "
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.227138 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities\") pod \"74739a17-5a6c-4c53-a927-47003c5d03d8\" (UID: \"74739a17-5a6c-4c53-a927-47003c5d03d8\") "
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.227948 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities" (OuterVolumeSpecName: "utilities") pod "74739a17-5a6c-4c53-a927-47003c5d03d8" (UID: "74739a17-5a6c-4c53-a927-47003c5d03d8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.232655 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t" (OuterVolumeSpecName: "kube-api-access-vx47t") pod "74739a17-5a6c-4c53-a927-47003c5d03d8" (UID: "74739a17-5a6c-4c53-a927-47003c5d03d8"). InnerVolumeSpecName "kube-api-access-vx47t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.328250 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.328288 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx47t\" (UniqueName: \"kubernetes.io/projected/74739a17-5a6c-4c53-a927-47003c5d03d8-kube-api-access-vx47t\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.338997 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "74739a17-5a6c-4c53-a927-47003c5d03d8" (UID: "74739a17-5a6c-4c53-a927-47003c5d03d8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.429756 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74739a17-5a6c-4c53-a927-47003c5d03d8-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.595768 4768 generic.go:334] "Generic (PLEG): container finished" podID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerID="8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc" exitCode=0
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.595811 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerDied","Data":"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"}
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.595839 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sbv55" event={"ID":"74739a17-5a6c-4c53-a927-47003c5d03d8","Type":"ContainerDied","Data":"5d30f6233e065d981158b5c09dd3b9157d318b1d2c7f4d7e15291ad0eb62de3f"}
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.595852 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sbv55"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.595860 4768 scope.go:117] "RemoveContainer" containerID="8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.629271 4768 scope.go:117] "RemoveContainer" containerID="eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.630245 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.634441 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-sbv55"]
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.652217 4768 scope.go:117] "RemoveContainer" containerID="f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.690767 4768 scope.go:117] "RemoveContainer" containerID="8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"
Dec 03 16:32:52 crc kubenswrapper[4768]: E1203 16:32:52.691439 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc\": container with ID starting with 8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc not found: ID does not exist" containerID="8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.691496 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc"} err="failed to get container status \"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc\": rpc error: code = NotFound desc = could not find container \"8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc\": container with ID starting with 8825c461cd847565e1839ee154157e9c39a328cc7c52cafef640dc9585477fcc not found: ID does not exist"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.691533 4768 scope.go:117] "RemoveContainer" containerID="eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08"
Dec 03 16:32:52 crc kubenswrapper[4768]: E1203 16:32:52.692009 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08\": container with ID starting with eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08 not found: ID does not exist" containerID="eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.692044 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08"} err="failed to get container status \"eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08\": rpc error: code = NotFound desc = could not find container \"eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08\": container with ID starting with eaeae1def828844131317b342543cc1173e761a06df410f41fa4203583b3fd08 not found: ID does not exist"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.692073 4768 scope.go:117] "RemoveContainer" containerID="f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a"
Dec 03 16:32:52 crc kubenswrapper[4768]: E1203 16:32:52.692362 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a\": container with ID starting with f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a not found: ID does not exist" containerID="f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a"
Dec 03 16:32:52 crc kubenswrapper[4768]: I1203 16:32:52.692382 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a"} err="failed to get container status \"f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a\": rpc error: code = NotFound desc = could not find container \"f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a\": container with ID starting with f1c8f35f2cbbb6e79944dd6dd60ca56c578e105a0a8e4cfe6819c17c87deae0a not found: ID does not exist"
Dec 03 16:32:53 crc kubenswrapper[4768]: I1203 16:32:53.554989 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" path="/var/lib/kubelet/pods/74739a17-5a6c-4c53-a927-47003c5d03d8/volumes"
Dec 03 16:32:57 crc kubenswrapper[4768]: I1203 16:32:57.641492 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz" event={"ID":"08a42ef1-90ae-4368-870e-e9bda0d806b0","Type":"ContainerStarted","Data":"9118327f4dabe41337408970abc6a072c588f58240104276b94162af6837458f"}
Dec 03 16:32:57 crc kubenswrapper[4768]: I1203 16:32:57.641954 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:57 crc kubenswrapper[4768]: I1203 16:32:57.644164 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-6f7789658f-x4dsz"
Dec 03 16:32:57 crc kubenswrapper[4768]: I1203 16:32:57.664436
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.040575 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"]
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041559 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="extract"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041574 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="extract"
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041611 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="extract-content"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041621 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="extract-content"
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041631 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="util"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041639 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="util"
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041658 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="registry-server"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041665 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="registry-server"
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041674 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="extract-utilities"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041681 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="extract-utilities"
Dec 03 16:33:21 crc kubenswrapper[4768]: E1203 16:33:21.041690 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="pull"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041698 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="pull"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041823 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="74739a17-5a6c-4c53-a927-47003c5d03d8" containerName="registry-server"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.041860 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="45f2ba97-fe29-4c2e-949f-3e7d0243d7e3" containerName="extract"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.042838 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.056814 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"]
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.175744 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.175849 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx88x\" (UniqueName: \"kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.175888 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.277242 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.277407 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.277504 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx88x\" (UniqueName: \"kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.278205 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.278348 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.301695 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx88x\" (UniqueName: \"kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x\") pod \"redhat-marketplace-mrk7t\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.381395 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:21 crc kubenswrapper[4768]: I1203 16:33:21.885737 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"]
Dec 03 16:33:22 crc kubenswrapper[4768]: I1203 16:33:22.846540 4768 generic.go:334] "Generic (PLEG): container finished" podID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerID="d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f" exitCode=0
Dec 03 16:33:22 crc kubenswrapper[4768]: I1203 16:33:22.846628 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerDied","Data":"d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f"}
Dec 03 16:33:22 crc kubenswrapper[4768]: I1203 16:33:22.847037 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerStarted","Data":"2a8d5b69c2fc9b4323b9e9a491d9052239e6d2c50afc9949ca7d4bbb1019bd2e"}
Dec 03 16:33:23 crc kubenswrapper[4768]: I1203 16:33:23.857372 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerStarted","Data":"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a"}
Dec 03 16:33:24 crc kubenswrapper[4768]: I1203 16:33:24.868727 4768 generic.go:334] "Generic (PLEG): container finished" podID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerID="dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a" exitCode=0
Dec 03 16:33:24 crc kubenswrapper[4768]: I1203 16:33:24.868786 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerDied","Data":"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a"}
Dec 03 16:33:25 crc kubenswrapper[4768]: I1203 16:33:25.881373 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerStarted","Data":"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19"}
Dec 03 16:33:25 crc kubenswrapper[4768]: I1203 16:33:25.911380 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mrk7t" podStartSLOduration=2.264669947 podStartE2EDuration="4.911348467s" podCreationTimestamp="2025-12-03 16:33:21 +0000 UTC" firstStartedPulling="2025-12-03 16:33:22.848802392 +0000 UTC m=+899.768138825" lastFinishedPulling="2025-12-03 16:33:25.495480912 +0000 UTC m=+902.414817345" observedRunningTime="2025-12-03 16:33:25.908262436 +0000 UTC m=+902.827598879" watchObservedRunningTime="2025-12-03 16:33:25.911348467 +0000 UTC m=+902.830684930"
Dec 03 16:33:26 crc kubenswrapper[4768]: I1203 16:33:26.028802 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:33:26 crc kubenswrapper[4768]: I1203 16:33:26.028896 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.637712 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"]
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.639261 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.641540 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.654718 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"]
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.684964 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.685070 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.685104 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2m8j\" (UniqueName: \"kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.785810 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.785874 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2m8j\" (UniqueName: \"kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.785966 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.786411 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.786466 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.826524 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2m8j\" (UniqueName: \"kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:28 crc kubenswrapper[4768]: I1203 16:33:28.961174 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:29 crc kubenswrapper[4768]: I1203 16:33:29.378483 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"]
Dec 03 16:33:29 crc kubenswrapper[4768]: W1203 16:33:29.382285 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e3bf1c3_070b_49af_98bd_be91dbd82bae.slice/crio-74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb WatchSource:0}: Error finding container 74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb: Status 404 returned error can't find the container with id 74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb
Dec 03 16:33:29 crc kubenswrapper[4768]: I1203 16:33:29.907968 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" event={"ID":"3e3bf1c3-070b-49af-98bd-be91dbd82bae","Type":"ContainerStarted","Data":"74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb"}
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.382330 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.382414 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.438831 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.921514 4768 generic.go:334] "Generic (PLEG): container finished" podID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerID="12e2862a5cdca50327ed76518445c58e6e47f9740f49b04ce81769c3eb98b32a" exitCode=0
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.921550 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" event={"ID":"3e3bf1c3-070b-49af-98bd-be91dbd82bae","Type":"ContainerDied","Data":"12e2862a5cdca50327ed76518445c58e6e47f9740f49b04ce81769c3eb98b32a"}
Dec 03 16:33:31 crc kubenswrapper[4768]: I1203 16:33:31.987211 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mrk7t"
Dec 03 16:33:33 crc kubenswrapper[4768]: I1203 16:33:33.801448 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"]
Dec 03 16:33:33 crc kubenswrapper[4768]: I1203 16:33:33.938230 4768 generic.go:334] "Generic (PLEG): container finished" podID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerID="3c9a7acb46ba3eaf5bd52651e1a8669db10e44725517c9adf24428ddb1530c0e" exitCode=0
Dec 03 16:33:33 crc kubenswrapper[4768]: I1203 16:33:33.938341 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" event={"ID":"3e3bf1c3-070b-49af-98bd-be91dbd82bae","Type":"ContainerDied","Data":"3c9a7acb46ba3eaf5bd52651e1a8669db10e44725517c9adf24428ddb1530c0e"}
Dec 03 16:33:33 crc kubenswrapper[4768]: I1203 16:33:33.938741 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mrk7t" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="registry-server" containerID="cri-o://4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" gracePeriod=2
pod="openshift-marketplace/redhat-marketplace-mrk7t" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="registry-server" containerID="cri-o://4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" gracePeriod=2 Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.343695 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mrk7t" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.365661 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx88x\" (UniqueName: \"kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x\") pod \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.365748 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities\") pod \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.365819 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content\") pod \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\" (UID: \"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f\") " Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.372738 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x" (OuterVolumeSpecName: "kube-api-access-mx88x") pod "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" (UID: "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f"). InnerVolumeSpecName "kube-api-access-mx88x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.377368 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities" (OuterVolumeSpecName: "utilities") pod "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" (UID: "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.386132 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" (UID: "9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.467883 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx88x\" (UniqueName: \"kubernetes.io/projected/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-kube-api-access-mx88x\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.467914 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.467926 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.949092 4768 generic.go:334] "Generic (PLEG): container finished" podID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerID="33f8a2d5ddd3ca39f725529dea26910426decdf74b228c5cc88a33b130d559cf" exitCode=0 Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.949160 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" event={"ID":"3e3bf1c3-070b-49af-98bd-be91dbd82bae","Type":"ContainerDied","Data":"33f8a2d5ddd3ca39f725529dea26910426decdf74b228c5cc88a33b130d559cf"} Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.953263 4768 generic.go:334] "Generic (PLEG): container finished" podID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerID="4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" exitCode=0 Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.953306 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerDied","Data":"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19"} Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.953340 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mrk7t" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.953375 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mrk7t" event={"ID":"9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f","Type":"ContainerDied","Data":"2a8d5b69c2fc9b4323b9e9a491d9052239e6d2c50afc9949ca7d4bbb1019bd2e"} Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.953399 4768 scope.go:117] "RemoveContainer" containerID="4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.986221 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"] Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.990712 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mrk7t"] Dec 03 16:33:34 crc kubenswrapper[4768]: I1203 16:33:34.995872 4768 scope.go:117] "RemoveContainer" containerID="dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.029016 4768 scope.go:117] "RemoveContainer" containerID="d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.057334 4768 scope.go:117] "RemoveContainer" containerID="4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" Dec 03 16:33:35 crc kubenswrapper[4768]: E1203 16:33:35.058198 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19\": container with ID starting with 4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19 not found: ID does not exist" containerID="4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.058342 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19"} err="failed to get container status \"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19\": rpc error: code = NotFound desc = could not find container \"4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19\": container with ID starting with 4c9bacfa405fcf25408620541b5faa3e732879ed09451e1dc258bdbef59fee19 not found: ID does not exist" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.058465 4768 scope.go:117] "RemoveContainer" containerID="dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a" Dec 03 16:33:35 crc kubenswrapper[4768]: E1203 16:33:35.059086 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a\": container with ID starting with dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a not found: ID does not exist" containerID="dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.059272 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a"} err="failed to get container status \"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a\": rpc error: code = NotFound desc = could not find 
container \"dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a\": container with ID starting with dda6511f224ebb07fe2a506c604fb90bfc0591b8f2cc34a1dd48336f4d36e28a not found: ID does not exist" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.059401 4768 scope.go:117] "RemoveContainer" containerID="d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f" Dec 03 16:33:35 crc kubenswrapper[4768]: E1203 16:33:35.059858 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f\": container with ID starting with d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f not found: ID does not exist" containerID="d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.059897 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f"} err="failed to get container status \"d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f\": rpc error: code = NotFound desc = could not find container \"d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f\": container with ID starting with d30827340c7fb40f2816d2ffd31da3d48a9e0ff451f35a53cbee81ebbef6287f not found: ID does not exist" Dec 03 16:33:35 crc kubenswrapper[4768]: I1203 16:33:35.541912 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" path="/var/lib/kubelet/pods/9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f/volumes" Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.277393 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.293285 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle\") pod \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.293423 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util\") pod \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.293505 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2m8j\" (UniqueName: \"kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j\") pod \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\" (UID: \"3e3bf1c3-070b-49af-98bd-be91dbd82bae\") " Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.294011 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle" (OuterVolumeSpecName: "bundle") pod "3e3bf1c3-070b-49af-98bd-be91dbd82bae" (UID: "3e3bf1c3-070b-49af-98bd-be91dbd82bae"). InnerVolumeSpecName "bundle". 
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.295344 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.300960 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j" (OuterVolumeSpecName: "kube-api-access-n2m8j") pod "3e3bf1c3-070b-49af-98bd-be91dbd82bae" (UID: "3e3bf1c3-070b-49af-98bd-be91dbd82bae"). InnerVolumeSpecName "kube-api-access-n2m8j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.331298 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util" (OuterVolumeSpecName: "util") pod "3e3bf1c3-070b-49af-98bd-be91dbd82bae" (UID: "3e3bf1c3-070b-49af-98bd-be91dbd82bae"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.397128 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3e3bf1c3-070b-49af-98bd-be91dbd82bae-util\") on node \"crc\" DevicePath \"\""
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.397181 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2m8j\" (UniqueName: \"kubernetes.io/projected/3e3bf1c3-070b-49af-98bd-be91dbd82bae-kube-api-access-n2m8j\") on node \"crc\" DevicePath \"\""
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.969834 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7" event={"ID":"3e3bf1c3-070b-49af-98bd-be91dbd82bae","Type":"ContainerDied","Data":"74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb"}
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.969913 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74c76f9665d532c5376dae1ea533a5d423da54fe686729d579d593e01716a5fb"
Dec 03 16:33:36 crc kubenswrapper[4768]: I1203 16:33:36.970187 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879177 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"]
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879627 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="pull"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879639 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="pull"
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879652 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="util"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879658 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="util"
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879667 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="extract"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879673 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="extract"
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879683 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="registry-server"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879689 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="registry-server"
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879700 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="extract-utilities"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879706 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="extract-utilities"
Dec 03 16:33:40 crc kubenswrapper[4768]: E1203 16:33:40.879714 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="extract-content"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879719 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="extract-content"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879805 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e3bf1c3-070b-49af-98bd-be91dbd82bae" containerName="extract"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.879821 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cb9dfe8-24c0-4efc-a2c9-4dc6d414fe9f" containerName="registry-server"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.880204 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.884535 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.885226 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-kqqtt"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.885264 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.907673 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"]
Dec 03 16:33:40 crc kubenswrapper[4768]: I1203 16:33:40.957314 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzgff\" (UniqueName: \"kubernetes.io/projected/e4518900-d9d4-4ffd-a217-d8506b6d3027-kube-api-access-fzgff\") pod \"nmstate-operator-5b5b58f5c8-b4qp7\" (UID: \"e4518900-d9d4-4ffd-a217-d8506b6d3027\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"
Dec 03 16:33:41 crc kubenswrapper[4768]: I1203 16:33:41.058678 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzgff\" (UniqueName: \"kubernetes.io/projected/e4518900-d9d4-4ffd-a217-d8506b6d3027-kube-api-access-fzgff\") pod \"nmstate-operator-5b5b58f5c8-b4qp7\" (UID: \"e4518900-d9d4-4ffd-a217-d8506b6d3027\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"
Dec 03 16:33:41 crc kubenswrapper[4768]: I1203 16:33:41.080092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzgff\" (UniqueName: \"kubernetes.io/projected/e4518900-d9d4-4ffd-a217-d8506b6d3027-kube-api-access-fzgff\") pod \"nmstate-operator-5b5b58f5c8-b4qp7\" (UID: \"e4518900-d9d4-4ffd-a217-d8506b6d3027\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"
Dec 03 16:33:41 crc kubenswrapper[4768]: I1203 16:33:41.198970 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"
Dec 03 16:33:41 crc kubenswrapper[4768]: I1203 16:33:41.666804 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7"]
Dec 03 16:33:42 crc kubenswrapper[4768]: I1203 16:33:42.002030 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7" event={"ID":"e4518900-d9d4-4ffd-a217-d8506b6d3027","Type":"ContainerStarted","Data":"d381fbae8bfa9f436bb9d2dd3f4a37338dd444c51a011bc392db15c23d8589d9"}
Dec 03 16:33:43 crc kubenswrapper[4768]: I1203 16:33:43.998654 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6knsv"]
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.000346 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.009873 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6knsv"]
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.021653 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7" event={"ID":"e4518900-d9d4-4ffd-a217-d8506b6d3027","Type":"ContainerStarted","Data":"1aeda8d364faf3911f6b689efe7f54767d5f1c85a4a863bd9b3823617d78be73"}
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.122905 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.122990 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.123023 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wm4l\" (UniqueName: \"kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.224473 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.224584 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.224643 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wm4l\" (UniqueName: \"kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.225133 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.225150 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv"
(UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.246122 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wm4l\" (UniqueName: \"kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l\") pod \"community-operators-6knsv\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.323417 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.762412 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-b4qp7" podStartSLOduration=2.657333545 podStartE2EDuration="4.76239058s" podCreationTimestamp="2025-12-03 16:33:40 +0000 UTC" firstStartedPulling="2025-12-03 16:33:41.673164829 +0000 UTC m=+918.592501252" lastFinishedPulling="2025-12-03 16:33:43.778221864 +0000 UTC m=+920.697558287" observedRunningTime="2025-12-03 16:33:44.048705339 +0000 UTC m=+920.968041782" watchObservedRunningTime="2025-12-03 16:33:44.76239058 +0000 UTC m=+921.681727003" Dec 03 16:33:44 crc kubenswrapper[4768]: I1203 16:33:44.766827 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6knsv"] Dec 03 16:33:44 crc kubenswrapper[4768]: W1203 16:33:44.776753 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff52fc74_9747_4195_b685_08b78b1cb928.slice/crio-1fe3110213056ec6d5dcaa29463c5141496d16ff32d6888020b9479b1363fd8e WatchSource:0}: Error finding container 1fe3110213056ec6d5dcaa29463c5141496d16ff32d6888020b9479b1363fd8e: Status 404 returned error can't find the container with id 1fe3110213056ec6d5dcaa29463c5141496d16ff32d6888020b9479b1363fd8e Dec 03 16:33:45 crc kubenswrapper[4768]: I1203 16:33:45.027609 4768 generic.go:334] "Generic (PLEG): container finished" podID="ff52fc74-9747-4195-b685-08b78b1cb928" containerID="bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359" exitCode=0 Dec 03 16:33:45 crc kubenswrapper[4768]: I1203 16:33:45.028904 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerDied","Data":"bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359"} Dec 03 16:33:45 crc kubenswrapper[4768]: I1203 16:33:45.028932 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerStarted","Data":"1fe3110213056ec6d5dcaa29463c5141496d16ff32d6888020b9479b1363fd8e"} Dec 03 16:33:46 crc kubenswrapper[4768]: I1203 16:33:46.033675 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerStarted","Data":"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc"} Dec 03 16:33:47 crc kubenswrapper[4768]: I1203 16:33:47.044917 4768 generic.go:334] "Generic (PLEG): container finished" 
podID="ff52fc74-9747-4195-b685-08b78b1cb928" containerID="acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc" exitCode=0 Dec 03 16:33:47 crc kubenswrapper[4768]: I1203 16:33:47.044968 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerDied","Data":"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc"} Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.057729 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerStarted","Data":"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb"} Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.087560 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6knsv" podStartSLOduration=2.465496712 podStartE2EDuration="5.087529993s" podCreationTimestamp="2025-12-03 16:33:43 +0000 UTC" firstStartedPulling="2025-12-03 16:33:45.029879416 +0000 UTC m=+921.949215839" lastFinishedPulling="2025-12-03 16:33:47.651912657 +0000 UTC m=+924.571249120" observedRunningTime="2025-12-03 16:33:48.081424642 +0000 UTC m=+925.000761105" watchObservedRunningTime="2025-12-03 16:33:48.087529993 +0000 UTC m=+925.006866446" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.606038 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.607323 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.631730 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.788742 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.788794 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.788859 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfl26\" (UniqueName: \"kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.890387 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " 
pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.890450 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.890485 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfl26\" (UniqueName: \"kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.890933 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.890977 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.910358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfl26\" (UniqueName: \"kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26\") pod \"certified-operators-kx4dv\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:48 crc kubenswrapper[4768]: I1203 16:33:48.931669 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:49 crc kubenswrapper[4768]: W1203 16:33:49.173722 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32e0a715_9cff_4943_b603_bf688f3f240c.slice/crio-3265e1a3818d8fb3311ad0d33e2debb71c2f03e838fb6ce9d2a39adf96d23227 WatchSource:0}: Error finding container 3265e1a3818d8fb3311ad0d33e2debb71c2f03e838fb6ce9d2a39adf96d23227: Status 404 returned error can't find the container with id 3265e1a3818d8fb3311ad0d33e2debb71c2f03e838fb6ce9d2a39adf96d23227 Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.174473 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.799894 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.801399 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.806038 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-rxq84" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.812713 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.832650 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.833380 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.842208 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.858122 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.860782 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-v6x5p"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.861480 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.904531 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.904613 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-nmstate-lock\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.905149 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c594\" (UniqueName: \"kubernetes.io/projected/3f27fdbd-f4fd-462c-9931-deb08bc97037-kube-api-access-8c594\") pod \"nmstate-metrics-7f946cbc9-ggfpn\" (UID: \"3f27fdbd-f4fd-462c-9931-deb08bc97037\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.905200 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-ovs-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.905231 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-dbus-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 
03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.905256 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdrb9\" (UniqueName: \"kubernetes.io/projected/d339ee13-d547-4fa6-a7bb-17eabe43d15c-kube-api-access-gdrb9\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.905294 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqcmk\" (UniqueName: \"kubernetes.io/projected/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-kube-api-access-qqcmk\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.959559 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.960421 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.963631 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-6stbs" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.963857 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.970798 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg"] Dec 03 16:33:49 crc kubenswrapper[4768]: I1203 16:33:49.972799 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.005942 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.005986 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006037 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-nmstate-lock\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006058 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c594\" (UniqueName: \"kubernetes.io/projected/3f27fdbd-f4fd-462c-9931-deb08bc97037-kube-api-access-8c594\") pod \"nmstate-metrics-7f946cbc9-ggfpn\" (UID: \"3f27fdbd-f4fd-462c-9931-deb08bc97037\") " 
pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006082 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-ovs-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006097 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-dbus-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006118 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdrb9\" (UniqueName: \"kubernetes.io/projected/d339ee13-d547-4fa6-a7bb-17eabe43d15c-kube-api-access-gdrb9\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006136 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft2hs\" (UniqueName: \"kubernetes.io/projected/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-kube-api-access-ft2hs\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: E1203 16:33:50.006156 4768 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-ovs-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: E1203 16:33:50.006222 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair podName:d339ee13-d547-4fa6-a7bb-17eabe43d15c nodeName:}" failed. No retries permitted until 2025-12-03 16:33:50.506202064 +0000 UTC m=+927.425538487 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-727lm" (UID: "d339ee13-d547-4fa6-a7bb-17eabe43d15c") : secret "openshift-nmstate-webhook" not found Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006285 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-nmstate-lock\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006166 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqcmk\" (UniqueName: \"kubernetes.io/projected/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-kube-api-access-qqcmk\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006353 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.006532 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-dbus-socket\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.023352 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdrb9\" (UniqueName: \"kubernetes.io/projected/d339ee13-d547-4fa6-a7bb-17eabe43d15c-kube-api-access-gdrb9\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.029527 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c594\" (UniqueName: \"kubernetes.io/projected/3f27fdbd-f4fd-462c-9931-deb08bc97037-kube-api-access-8c594\") pod \"nmstate-metrics-7f946cbc9-ggfpn\" (UID: \"3f27fdbd-f4fd-462c-9931-deb08bc97037\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.031843 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqcmk\" (UniqueName: \"kubernetes.io/projected/6aff85f3-8f5e-44d9-be27-1bc63b1d8a38-kube-api-access-qqcmk\") pod \"nmstate-handler-v6x5p\" (UID: \"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38\") " pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.071837 4768 generic.go:334] "Generic (PLEG): container finished" podID="32e0a715-9cff-4943-b603-bf688f3f240c" containerID="bcdde82a7a5283da0219d49477e381c52a1375047e0b7e21815393bd7cc0666f" exitCode=0 Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.071878 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" 
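
The failed tls-key-pair mount above shows the kubelet's per-operation backoff: the secret does not exist yet, so nestedpendingoperations schedules the next attempt 500ms out (durationBeforeRetry) and the delay grows on repeated failures until the secret appears, as it does further down in this log. A rough Go sketch of that policy; the initial 500ms matches the entry, but the doubling factor and cap here are assumptions for the sketch, not values taken from kubelet source:

package main

import (
	"fmt"
	"time"
)

// Illustrative exponential backoff in the spirit of the
// "No retries permitted until ... (durationBeforeRetry 500ms)" entry above.
type backoff struct {
	delay time.Duration // current wait between attempts
	next  time.Time     // earliest time a retry is permitted
}

// fail records a failed attempt: start at 500ms, then double up to a cap.
func (b *backoff) fail(now time.Time) {
	if b.delay == 0 {
		b.delay = 500 * time.Millisecond
	} else {
		b.delay *= 2
		if cap := 2 * time.Minute; b.delay > cap { // cap is an assumption
			b.delay = cap
		}
	}
	b.next = now.Add(b.delay)
}

// allowed reports whether a retry is permitted at time now.
func (b *backoff) allowed(now time.Time) bool { return !now.Before(b.next) }

func main() {
	var b backoff
	now := time.Now()
	for i := 1; i <= 4; i++ {
		b.fail(now)
		fmt.Printf("attempt %d failed; no retries permitted for %v\n", i, b.delay)
		now = b.next // pretend we retried exactly when allowed
	}
	_ = b.allowed(now)
}
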
event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerDied","Data":"bcdde82a7a5283da0219d49477e381c52a1375047e0b7e21815393bd7cc0666f"} Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.071902 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerStarted","Data":"3265e1a3818d8fb3311ad0d33e2debb71c2f03e838fb6ce9d2a39adf96d23227"} Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.106800 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft2hs\" (UniqueName: \"kubernetes.io/projected/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-kube-api-access-ft2hs\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.107031 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.107124 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.108294 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.116705 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.124938 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.154216 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft2hs\" (UniqueName: \"kubernetes.io/projected/fee7ecf6-cf74-41de-b6f7-16e83ab2cd84-kube-api-access-ft2hs\") pod \"nmstate-console-plugin-7fbb5f6569-79jcg\" (UID: \"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.157361 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5b5b758475-rw29r"] Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.161065 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.169415 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b5b758475-rw29r"] Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.184207 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208343 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-service-ca\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208377 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-trusted-ca-bundle\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208484 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208552 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-console-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208579 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-oauth-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208676 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-oauth-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.208719 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6pc6\" (UniqueName: \"kubernetes.io/projected/835572e3-ced7-49a7-b130-d70191b75a04-kube-api-access-m6pc6\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: W1203 16:33:50.210418 4768 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6aff85f3_8f5e_44d9_be27_1bc63b1d8a38.slice/crio-a2324f28241beb4b6457b83bb1972ef8caecfcbb11b5a3fc030b6228dcef62c6 WatchSource:0}: Error finding container a2324f28241beb4b6457b83bb1972ef8caecfcbb11b5a3fc030b6228dcef62c6: Status 404 returned error can't find the container with id a2324f28241beb4b6457b83bb1972ef8caecfcbb11b5a3fc030b6228dcef62c6 Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.276493 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310119 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310181 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-console-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310201 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-oauth-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310222 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-oauth-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310244 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6pc6\" (UniqueName: \"kubernetes.io/projected/835572e3-ced7-49a7-b130-d70191b75a04-kube-api-access-m6pc6\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310277 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-service-ca\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.310293 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-trusted-ca-bundle\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.311407 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-trusted-ca-bundle\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.312707 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-service-ca\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.313363 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-console-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.313943 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/835572e3-ced7-49a7-b130-d70191b75a04-oauth-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.314393 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-oauth-config\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.318092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/835572e3-ced7-49a7-b130-d70191b75a04-console-serving-cert\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.327213 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6pc6\" (UniqueName: \"kubernetes.io/projected/835572e3-ced7-49a7-b130-d70191b75a04-kube-api-access-m6pc6\") pod \"console-5b5b758475-rw29r\" (UID: \"835572e3-ced7-49a7-b130-d70191b75a04\") " pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.364382 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn"] Dec 03 16:33:50 crc kubenswrapper[4768]: W1203 16:33:50.366843 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f27fdbd_f4fd_462c_9931_deb08bc97037.slice/crio-d31185f92009a978102fec5107030f2d92554de76c4f74760f9dfcaa28fa3bf5 WatchSource:0}: Error finding container d31185f92009a978102fec5107030f2d92554de76c4f74760f9dfcaa28fa3bf5: Status 404 returned error can't find the container with id d31185f92009a978102fec5107030f2d92554de76c4f74760f9dfcaa28fa3bf5 Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.476649 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg"] Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.503927 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.513209 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.516735 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/d339ee13-d547-4fa6-a7bb-17eabe43d15c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-727lm\" (UID: \"d339ee13-d547-4fa6-a7bb-17eabe43d15c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.720998 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5b5b758475-rw29r"] Dec 03 16:33:50 crc kubenswrapper[4768]: I1203 16:33:50.758164 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.079121 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v6x5p" event={"ID":"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38","Type":"ContainerStarted","Data":"a2324f28241beb4b6457b83bb1972ef8caecfcbb11b5a3fc030b6228dcef62c6"} Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.079867 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" event={"ID":"3f27fdbd-f4fd-462c-9931-deb08bc97037","Type":"ContainerStarted","Data":"d31185f92009a978102fec5107030f2d92554de76c4f74760f9dfcaa28fa3bf5"} Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.080494 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" event={"ID":"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84","Type":"ContainerStarted","Data":"53b615ce925cb39d148636422d4529448a652ddb8d21b0c93b21e2d091ca3d54"} Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.081521 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b5b758475-rw29r" event={"ID":"835572e3-ced7-49a7-b130-d70191b75a04","Type":"ContainerStarted","Data":"ccdefaae8c0dabef057ad7ec087f505f7e3ccaed0379556b358caf939ae4835d"} Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.081546 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5b5b758475-rw29r" event={"ID":"835572e3-ced7-49a7-b130-d70191b75a04","Type":"ContainerStarted","Data":"596407f8c7b874f753694aecc09117b1077659196971b9935ce9505e75322163"} Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.102643 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5b5b758475-rw29r" podStartSLOduration=1.102626546 podStartE2EDuration="1.102626546s" podCreationTimestamp="2025-12-03 16:33:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:33:51.099813902 +0000 UTC m=+928.019150325" watchObservedRunningTime="2025-12-03 16:33:51.102626546 +0000 UTC m=+928.021962969" Dec 03 16:33:51 crc kubenswrapper[4768]: I1203 16:33:51.200795 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm"] Dec 03 16:33:52 crc kubenswrapper[4768]: I1203 16:33:52.092223 4768 generic.go:334] "Generic (PLEG): container finished" podID="32e0a715-9cff-4943-b603-bf688f3f240c" containerID="f7d67adc4b495207dfaf331834f90f7cbdb631e1f92a23d691b4d65ef113206c" exitCode=0 Dec 03 16:33:52 crc kubenswrapper[4768]: I1203 16:33:52.092469 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerDied","Data":"f7d67adc4b495207dfaf331834f90f7cbdb631e1f92a23d691b4d65ef113206c"} Dec 03 16:33:52 crc kubenswrapper[4768]: I1203 16:33:52.094445 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" event={"ID":"d339ee13-d547-4fa6-a7bb-17eabe43d15c","Type":"ContainerStarted","Data":"89e5821ce1f5e177e11f90e7971eb2422eb393fa3df564a81499389233fe5bbe"} Dec 03 16:33:54 crc kubenswrapper[4768]: I1203 16:33:54.324321 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:54 crc kubenswrapper[4768]: I1203 16:33:54.324668 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:54 crc kubenswrapper[4768]: I1203 16:33:54.371368 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.113140 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" event={"ID":"d339ee13-d547-4fa6-a7bb-17eabe43d15c","Type":"ContainerStarted","Data":"37aa0e32993ac1a751a5398ebd38fa542a0bff42eb6a50ebf39f75867b43ef13"} Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.113685 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.115858 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" event={"ID":"3f27fdbd-f4fd-462c-9931-deb08bc97037","Type":"ContainerStarted","Data":"13988ce9f3743181910d8f33bb469fcee7ec169a416c1087e890454d4a218896"} Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.117768 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" event={"ID":"fee7ecf6-cf74-41de-b6f7-16e83ab2cd84","Type":"ContainerStarted","Data":"097f37a6d9333f01920896460a2adcf41202d096e5e8c6f220f2d3eccc248e54"} Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.122441 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerStarted","Data":"2b324fa74684fd5b4cbc750c3b2e5ec9d7647179268d64235a1da1d7f1624edf"} Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.128295 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-v6x5p" event={"ID":"6aff85f3-8f5e-44d9-be27-1bc63b1d8a38","Type":"ContainerStarted","Data":"54395be95fddc618055d2c96f51322ab9a439ed8e78b6f0052ea8bad0b7d973f"} Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.133448 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" 
podStartSLOduration=3.01360986 podStartE2EDuration="6.133427008s" podCreationTimestamp="2025-12-03 16:33:49 +0000 UTC" firstStartedPulling="2025-12-03 16:33:51.220456949 +0000 UTC m=+928.139793362" lastFinishedPulling="2025-12-03 16:33:54.340274067 +0000 UTC m=+931.259610510" observedRunningTime="2025-12-03 16:33:55.130701476 +0000 UTC m=+932.050037909" watchObservedRunningTime="2025-12-03 16:33:55.133427008 +0000 UTC m=+932.052763431" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.160368 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-79jcg" podStartSLOduration=2.310075456 podStartE2EDuration="6.160343329s" podCreationTimestamp="2025-12-03 16:33:49 +0000 UTC" firstStartedPulling="2025-12-03 16:33:50.485469794 +0000 UTC m=+927.404806217" lastFinishedPulling="2025-12-03 16:33:54.335737617 +0000 UTC m=+931.255074090" observedRunningTime="2025-12-03 16:33:55.154463544 +0000 UTC m=+932.073799977" watchObservedRunningTime="2025-12-03 16:33:55.160343329 +0000 UTC m=+932.079679752" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.186035 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.198222 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-v6x5p" podStartSLOduration=2.07438423 podStartE2EDuration="6.19820815s" podCreationTimestamp="2025-12-03 16:33:49 +0000 UTC" firstStartedPulling="2025-12-03 16:33:50.212338209 +0000 UTC m=+927.131674632" lastFinishedPulling="2025-12-03 16:33:54.336162119 +0000 UTC m=+931.255498552" observedRunningTime="2025-12-03 16:33:55.197721937 +0000 UTC m=+932.117058370" watchObservedRunningTime="2025-12-03 16:33:55.19820815 +0000 UTC m=+932.117544573" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.199493 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kx4dv" podStartSLOduration=2.975489027 podStartE2EDuration="7.199486533s" podCreationTimestamp="2025-12-03 16:33:48 +0000 UTC" firstStartedPulling="2025-12-03 16:33:50.073364228 +0000 UTC m=+926.992700651" lastFinishedPulling="2025-12-03 16:33:54.297361734 +0000 UTC m=+931.216698157" observedRunningTime="2025-12-03 16:33:55.181797846 +0000 UTC m=+932.101134299" watchObservedRunningTime="2025-12-03 16:33:55.199486533 +0000 UTC m=+932.118822956" Dec 03 16:33:55 crc kubenswrapper[4768]: I1203 16:33:55.201022 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:56 crc kubenswrapper[4768]: I1203 16:33:56.028414 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:33:56 crc kubenswrapper[4768]: I1203 16:33:56.028811 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.153903 4768 kubelet.go:2453] "SyncLoop (PLEG): 
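
The machine-config-daemon liveness failure above is an ordinary HTTP GET that was refused because nothing was listening on 127.0.0.1:8798 at that moment. A self-contained Go sketch of an equivalent probe check; the endpoint comes from the log entry, while the one-second timeout and the helper itself are ours:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// Illustrative HTTP liveness check, mirroring the failed probe above:
// GET the health endpoint with a short timeout; a refused connection or
// a non-2xx status counts as a probe failure.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused", as in the log
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode > 299 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("Probe failed:", err)
	} else {
		fmt.Println("Probe succeeded")
	}
}
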
event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" event={"ID":"3f27fdbd-f4fd-462c-9931-deb08bc97037","Type":"ContainerStarted","Data":"31bd50847ca96df7f586bd0a9d4c73ea9668afefe5ef73282ac87bdddeb684c7"} Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.199206 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6knsv"] Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.199850 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6knsv" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="registry-server" containerID="cri-o://a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb" gracePeriod=2 Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.207510 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ggfpn" podStartSLOduration=1.826639347 podStartE2EDuration="9.207490979s" podCreationTimestamp="2025-12-03 16:33:49 +0000 UTC" firstStartedPulling="2025-12-03 16:33:50.3690835 +0000 UTC m=+927.288419923" lastFinishedPulling="2025-12-03 16:33:57.749935102 +0000 UTC m=+934.669271555" observedRunningTime="2025-12-03 16:33:58.189608106 +0000 UTC m=+935.108944529" watchObservedRunningTime="2025-12-03 16:33:58.207490979 +0000 UTC m=+935.126827412" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.681102 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.850489 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities\") pod \"ff52fc74-9747-4195-b685-08b78b1cb928\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.850583 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") pod \"ff52fc74-9747-4195-b685-08b78b1cb928\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.850686 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wm4l\" (UniqueName: \"kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l\") pod \"ff52fc74-9747-4195-b685-08b78b1cb928\" (UID: \"ff52fc74-9747-4195-b685-08b78b1cb928\") " Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.853883 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities" (OuterVolumeSpecName: "utilities") pod "ff52fc74-9747-4195-b685-08b78b1cb928" (UID: "ff52fc74-9747-4195-b685-08b78b1cb928"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.856519 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l" (OuterVolumeSpecName: "kube-api-access-4wm4l") pod "ff52fc74-9747-4195-b685-08b78b1cb928" (UID: "ff52fc74-9747-4195-b685-08b78b1cb928"). InnerVolumeSpecName "kube-api-access-4wm4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.931641 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ff52fc74-9747-4195-b685-08b78b1cb928" (UID: "ff52fc74-9747-4195-b685-08b78b1cb928"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.932037 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.932070 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.957955 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.958019 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ff52fc74-9747-4195-b685-08b78b1cb928-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.958063 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wm4l\" (UniqueName: \"kubernetes.io/projected/ff52fc74-9747-4195-b685-08b78b1cb928-kube-api-access-4wm4l\") on node \"crc\" DevicePath \"\"" Dec 03 16:33:58 crc kubenswrapper[4768]: I1203 16:33:58.976117 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.166033 4768 generic.go:334] "Generic (PLEG): container finished" podID="ff52fc74-9747-4195-b685-08b78b1cb928" containerID="a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb" exitCode=0 Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.166168 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerDied","Data":"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb"} Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.166225 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6knsv" event={"ID":"ff52fc74-9747-4195-b685-08b78b1cb928","Type":"ContainerDied","Data":"1fe3110213056ec6d5dcaa29463c5141496d16ff32d6888020b9479b1363fd8e"} Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.166248 4768 scope.go:117] "RemoveContainer" containerID="a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.166272 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6knsv" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.193590 4768 scope.go:117] "RemoveContainer" containerID="acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.217747 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6knsv"] Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.222760 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6knsv"] Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.225547 4768 scope.go:117] "RemoveContainer" containerID="bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.248207 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.254155 4768 scope.go:117] "RemoveContainer" containerID="a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb" Dec 03 16:33:59 crc kubenswrapper[4768]: E1203 16:33:59.254761 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb\": container with ID starting with a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb not found: ID does not exist" containerID="a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.254894 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb"} err="failed to get container status \"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb\": rpc error: code = NotFound desc = could not find container \"a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb\": container with ID starting with a2f83d5bc1d8ef18c93dc05b35e0dc466db1485a3a23688001a3b8061a62b1bb not found: ID does not exist" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.255053 4768 scope.go:117] "RemoveContainer" containerID="acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc" Dec 03 16:33:59 crc kubenswrapper[4768]: E1203 16:33:59.255498 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc\": container with ID starting with acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc not found: ID does not exist" containerID="acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.255585 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc"} err="failed to get container status \"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc\": rpc error: code = NotFound desc = could not find container \"acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc\": container with ID starting with acb077c435476e5b20049e2790cd753afa0f16633f5a4c584028ac4800cea6cc not found: ID does not exist" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.255671 4768 scope.go:117] "RemoveContainer" 
containerID="bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359" Dec 03 16:33:59 crc kubenswrapper[4768]: E1203 16:33:59.256031 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359\": container with ID starting with bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359 not found: ID does not exist" containerID="bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.256065 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359"} err="failed to get container status \"bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359\": rpc error: code = NotFound desc = could not find container \"bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359\": container with ID starting with bbf6ebd4d77c23f593c600831877f8fe96e1de32ec6113fdb1c1b9b3bfa64359 not found: ID does not exist" Dec 03 16:33:59 crc kubenswrapper[4768]: I1203 16:33:59.547207 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" path="/var/lib/kubelet/pods/ff52fc74-9747-4195-b685-08b78b1cb928/volumes" Dec 03 16:34:00 crc kubenswrapper[4768]: I1203 16:34:00.226349 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-v6x5p" Dec 03 16:34:00 crc kubenswrapper[4768]: I1203 16:34:00.504549 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:34:00 crc kubenswrapper[4768]: I1203 16:34:00.504699 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:34:00 crc kubenswrapper[4768]: I1203 16:34:00.509537 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:34:01 crc kubenswrapper[4768]: I1203 16:34:01.195490 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5b5b758475-rw29r" Dec 03 16:34:01 crc kubenswrapper[4768]: I1203 16:34:01.277225 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-64v26"] Dec 03 16:34:02 crc kubenswrapper[4768]: I1203 16:34:02.602103 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:34:02 crc kubenswrapper[4768]: I1203 16:34:02.602882 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-kx4dv" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="registry-server" containerID="cri-o://2b324fa74684fd5b4cbc750c3b2e5ec9d7647179268d64235a1da1d7f1624edf" gracePeriod=2 Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.206364 4768 generic.go:334] "Generic (PLEG): container finished" podID="32e0a715-9cff-4943-b603-bf688f3f240c" containerID="2b324fa74684fd5b4cbc750c3b2e5ec9d7647179268d64235a1da1d7f1624edf" exitCode=0 Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.206465 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" 
event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerDied","Data":"2b324fa74684fd5b4cbc750c3b2e5ec9d7647179268d64235a1da1d7f1624edf"} Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.654908 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.733210 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content\") pod \"32e0a715-9cff-4943-b603-bf688f3f240c\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.733285 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities\") pod \"32e0a715-9cff-4943-b603-bf688f3f240c\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.733358 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfl26\" (UniqueName: \"kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26\") pod \"32e0a715-9cff-4943-b603-bf688f3f240c\" (UID: \"32e0a715-9cff-4943-b603-bf688f3f240c\") " Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.734792 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities" (OuterVolumeSpecName: "utilities") pod "32e0a715-9cff-4943-b603-bf688f3f240c" (UID: "32e0a715-9cff-4943-b603-bf688f3f240c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.743400 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26" (OuterVolumeSpecName: "kube-api-access-rfl26") pod "32e0a715-9cff-4943-b603-bf688f3f240c" (UID: "32e0a715-9cff-4943-b603-bf688f3f240c"). InnerVolumeSpecName "kube-api-access-rfl26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.813754 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32e0a715-9cff-4943-b603-bf688f3f240c" (UID: "32e0a715-9cff-4943-b603-bf688f3f240c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.834590 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.834635 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e0a715-9cff-4943-b603-bf688f3f240c-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:03 crc kubenswrapper[4768]: I1203 16:34:03.834645 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfl26\" (UniqueName: \"kubernetes.io/projected/32e0a715-9cff-4943-b603-bf688f3f240c-kube-api-access-rfl26\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.217418 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kx4dv" event={"ID":"32e0a715-9cff-4943-b603-bf688f3f240c","Type":"ContainerDied","Data":"3265e1a3818d8fb3311ad0d33e2debb71c2f03e838fb6ce9d2a39adf96d23227"} Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.217501 4768 scope.go:117] "RemoveContainer" containerID="2b324fa74684fd5b4cbc750c3b2e5ec9d7647179268d64235a1da1d7f1624edf" Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.217505 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kx4dv" Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.272004 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.275118 4768 scope.go:117] "RemoveContainer" containerID="f7d67adc4b495207dfaf331834f90f7cbdb631e1f92a23d691b4d65ef113206c" Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.276556 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kx4dv"] Dec 03 16:34:04 crc kubenswrapper[4768]: I1203 16:34:04.296651 4768 scope.go:117] "RemoveContainer" containerID="bcdde82a7a5283da0219d49477e381c52a1375047e0b7e21815393bd7cc0666f" Dec 03 16:34:05 crc kubenswrapper[4768]: I1203 16:34:05.544112 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" path="/var/lib/kubelet/pods/32e0a715-9cff-4943-b603-bf688f3f240c/volumes" Dec 03 16:34:10 crc kubenswrapper[4768]: I1203 16:34:10.768173 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-727lm" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097053 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4"] Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097874 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="extract-content" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097890 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="extract-content" Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097906 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="extract-content" Dec 03 16:34:25 crc 
kubenswrapper[4768]: I1203 16:34:25.097912 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="extract-content" Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097925 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097931 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097938 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="extract-utilities" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097943 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="extract-utilities" Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097954 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="extract-utilities" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097960 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="extract-utilities" Dec 03 16:34:25 crc kubenswrapper[4768]: E1203 16:34:25.097972 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.097977 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.098083 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e0a715-9cff-4943-b603-bf688f3f240c" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.098102 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff52fc74-9747-4195-b685-08b78b1cb928" containerName="registry-server" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.098913 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.100403 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.113166 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4"] Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.146726 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xfzj\" (UniqueName: \"kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.147015 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.147158 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.248316 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xfzj\" (UniqueName: \"kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.248584 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.248667 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.249250 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.249297 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.268089 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xfzj\" (UniqueName: \"kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.448230 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:25 crc kubenswrapper[4768]: I1203 16:34:25.787351 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4"] Dec 03 16:34:25 crc kubenswrapper[4768]: W1203 16:34:25.791760 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8101fa8a_2d2c_4622_973c_a805443c3269.slice/crio-3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20 WatchSource:0}: Error finding container 3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20: Status 404 returned error can't find the container with id 3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20 Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.028437 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.029020 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.029176 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.029946 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness 
probe, will be restarted" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.030103 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346" gracePeriod=600 Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.325752 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-64v26" podUID="57775487-facb-4956-a875-a740a48628c1" containerName="console" containerID="cri-o://717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701" gracePeriod=15 Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.392763 4768 generic.go:334] "Generic (PLEG): container finished" podID="8101fa8a-2d2c-4622-973c-a805443c3269" containerID="667a84ccd158d9e3956ce3dd4c2eee8a85d133b7b59e476272b931a2bbf2c7d0" exitCode=0 Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.392811 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" event={"ID":"8101fa8a-2d2c-4622-973c-a805443c3269","Type":"ContainerDied","Data":"667a84ccd158d9e3956ce3dd4c2eee8a85d133b7b59e476272b931a2bbf2c7d0"} Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.392853 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" event={"ID":"8101fa8a-2d2c-4622-973c-a805443c3269","Type":"ContainerStarted","Data":"3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20"} Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.397316 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346" exitCode=0 Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.397357 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346"} Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.397437 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9"} Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.397465 4768 scope.go:117] "RemoveContainer" containerID="82a30a2a5648147df1c33aacd2597c45c5d3751d97f69a2fe55f347f7ac5fc32" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.714880 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-64v26_57775487-facb-4956-a875-a740a48628c1/console/0.log" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.715169 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.868816 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.868921 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.868964 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svxkg\" (UniqueName: \"kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.869034 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.869075 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.869121 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.869159 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle\") pod \"57775487-facb-4956-a875-a740a48628c1\" (UID: \"57775487-facb-4956-a875-a740a48628c1\") " Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.869884 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config" (OuterVolumeSpecName: "console-config") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.870078 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca" (OuterVolumeSpecName: "service-ca") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.870221 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.870272 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.877682 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.878704 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.880125 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg" (OuterVolumeSpecName: "kube-api-access-svxkg") pod "57775487-facb-4956-a875-a740a48628c1" (UID: "57775487-facb-4956-a875-a740a48628c1"). InnerVolumeSpecName "kube-api-access-svxkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970766 4768 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970824 4768 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-console-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970847 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svxkg\" (UniqueName: \"kubernetes.io/projected/57775487-facb-4956-a875-a740a48628c1-kube-api-access-svxkg\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970865 4768 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/57775487-facb-4956-a875-a740a48628c1-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970885 4768 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-service-ca\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970902 4768 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:26 crc kubenswrapper[4768]: I1203 16:34:26.970918 4768 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/57775487-facb-4956-a875-a740a48628c1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.407872 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-64v26_57775487-facb-4956-a875-a740a48628c1/console/0.log" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.407968 4768 generic.go:334] "Generic (PLEG): container finished" podID="57775487-facb-4956-a875-a740a48628c1" containerID="717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701" exitCode=2 Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.408058 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-64v26" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.408141 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-64v26" event={"ID":"57775487-facb-4956-a875-a740a48628c1","Type":"ContainerDied","Data":"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701"} Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.408218 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-64v26" event={"ID":"57775487-facb-4956-a875-a740a48628c1","Type":"ContainerDied","Data":"8ffa292b27c90cb1df25907b8642bda3cc66e8d6bdbebdc80dd62f4975f85b21"} Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.408266 4768 scope.go:117] "RemoveContainer" containerID="717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.449227 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-64v26"] Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.455425 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-64v26"] Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.541247 4768 scope.go:117] "RemoveContainer" containerID="717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.546048 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57775487-facb-4956-a875-a740a48628c1" path="/var/lib/kubelet/pods/57775487-facb-4956-a875-a740a48628c1/volumes" Dec 03 16:34:27 crc kubenswrapper[4768]: E1203 16:34:27.546479 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701\": container with ID starting with 717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701 not found: ID does not exist" containerID="717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701" Dec 03 16:34:27 crc kubenswrapper[4768]: I1203 16:34:27.546507 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701"} err="failed to get container status \"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701\": rpc error: code = NotFound desc = could not find container \"717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701\": container with ID starting with 717e6a697af39670695c614331167d021cbe3a2454098e5caa17d0cab39c1701 not found: ID does not exist" Dec 03 16:34:28 crc kubenswrapper[4768]: I1203 16:34:28.425865 4768 generic.go:334] "Generic (PLEG): container finished" podID="8101fa8a-2d2c-4622-973c-a805443c3269" containerID="e32bb749ffffdc78eb0796b4bbff0550e25791f802d167a8a8d04d5a81906277" exitCode=0 Dec 03 16:34:28 crc kubenswrapper[4768]: I1203 16:34:28.425958 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" event={"ID":"8101fa8a-2d2c-4622-973c-a805443c3269","Type":"ContainerDied","Data":"e32bb749ffffdc78eb0796b4bbff0550e25791f802d167a8a8d04d5a81906277"} Dec 03 16:34:29 crc kubenswrapper[4768]: I1203 16:34:29.438592 4768 generic.go:334] "Generic (PLEG): container finished" podID="8101fa8a-2d2c-4622-973c-a805443c3269" 
containerID="0c4a24ebb2c4589bcf97cdfcd56d22b368c407328c20263caa44e6d976dde8ce" exitCode=0 Dec 03 16:34:29 crc kubenswrapper[4768]: I1203 16:34:29.438788 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" event={"ID":"8101fa8a-2d2c-4622-973c-a805443c3269","Type":"ContainerDied","Data":"0c4a24ebb2c4589bcf97cdfcd56d22b368c407328c20263caa44e6d976dde8ce"} Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.799962 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.944968 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle\") pod \"8101fa8a-2d2c-4622-973c-a805443c3269\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.945027 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xfzj\" (UniqueName: \"kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj\") pod \"8101fa8a-2d2c-4622-973c-a805443c3269\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.945088 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util\") pod \"8101fa8a-2d2c-4622-973c-a805443c3269\" (UID: \"8101fa8a-2d2c-4622-973c-a805443c3269\") " Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.947149 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle" (OuterVolumeSpecName: "bundle") pod "8101fa8a-2d2c-4622-973c-a805443c3269" (UID: "8101fa8a-2d2c-4622-973c-a805443c3269"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.952018 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj" (OuterVolumeSpecName: "kube-api-access-4xfzj") pod "8101fa8a-2d2c-4622-973c-a805443c3269" (UID: "8101fa8a-2d2c-4622-973c-a805443c3269"). InnerVolumeSpecName "kube-api-access-4xfzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:34:30 crc kubenswrapper[4768]: I1203 16:34:30.962636 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util" (OuterVolumeSpecName: "util") pod "8101fa8a-2d2c-4622-973c-a805443c3269" (UID: "8101fa8a-2d2c-4622-973c-a805443c3269"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.046082 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.046386 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xfzj\" (UniqueName: \"kubernetes.io/projected/8101fa8a-2d2c-4622-973c-a805443c3269-kube-api-access-4xfzj\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.046402 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8101fa8a-2d2c-4622-973c-a805443c3269-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.456372 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" event={"ID":"8101fa8a-2d2c-4622-973c-a805443c3269","Type":"ContainerDied","Data":"3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20"} Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.456419 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3bd960abea20a9c2b3eab225ed004784e41c8fe8032874426b4ac6302336bd20" Dec 03 16:34:31 crc kubenswrapper[4768]: I1203 16:34:31.456437 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685005 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc"] Dec 03 16:34:39 crc kubenswrapper[4768]: E1203 16:34:39.685516 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="extract" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685527 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="extract" Dec 03 16:34:39 crc kubenswrapper[4768]: E1203 16:34:39.685540 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="util" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685546 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="util" Dec 03 16:34:39 crc kubenswrapper[4768]: E1203 16:34:39.685562 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57775487-facb-4956-a875-a740a48628c1" containerName="console" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685569 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="57775487-facb-4956-a875-a740a48628c1" containerName="console" Dec 03 16:34:39 crc kubenswrapper[4768]: E1203 16:34:39.685577 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="pull" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685582 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="pull" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685692 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8101fa8a-2d2c-4622-973c-a805443c3269" containerName="extract" Dec 03 
16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.685705 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="57775487-facb-4956-a875-a740a48628c1" containerName="console" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.686121 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.688201 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.688569 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.688626 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-lf2fz" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.688626 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.688637 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.697731 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc"] Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.855125 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-webhook-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.855165 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4nxt\" (UniqueName: \"kubernetes.io/projected/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-kube-api-access-x4nxt\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.855189 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-apiservice-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.928422 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7"] Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.929623 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.931456 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.931554 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.932213 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-868gc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.943917 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7"] Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.956506 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4nxt\" (UniqueName: \"kubernetes.io/projected/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-kube-api-access-x4nxt\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.956567 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-apiservice-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.956696 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-webhook-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.963077 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-webhook-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.963114 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-apiservice-cert\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:39 crc kubenswrapper[4768]: I1203 16:34:39.993307 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4nxt\" (UniqueName: \"kubernetes.io/projected/7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3-kube-api-access-x4nxt\") pod \"metallb-operator-controller-manager-57f5c9498-vdmjc\" (UID: \"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3\") " pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.000063 4768 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.058218 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-webhook-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.058272 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-apiservice-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.058340 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j9sq\" (UniqueName: \"kubernetes.io/projected/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-kube-api-access-4j9sq\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.160973 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j9sq\" (UniqueName: \"kubernetes.io/projected/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-kube-api-access-4j9sq\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.161450 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-webhook-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.161477 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-apiservice-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.166068 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-apiservice-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.167478 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-webhook-cert\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " 
pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.183623 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4j9sq\" (UniqueName: \"kubernetes.io/projected/6fb7770c-b85a-4bd3-9f49-dedffaeae0e3-kube-api-access-4j9sq\") pod \"metallb-operator-webhook-server-98db5c7f-g87m7\" (UID: \"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3\") " pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.242949 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.536312 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc"] Dec 03 16:34:40 crc kubenswrapper[4768]: I1203 16:34:40.667696 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7"] Dec 03 16:34:40 crc kubenswrapper[4768]: W1203 16:34:40.670792 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fb7770c_b85a_4bd3_9f49_dedffaeae0e3.slice/crio-1614ab0619ce182b3ef2a62068c9dbc406dc23b5f7739f8037be915faefe1e12 WatchSource:0}: Error finding container 1614ab0619ce182b3ef2a62068c9dbc406dc23b5f7739f8037be915faefe1e12: Status 404 returned error can't find the container with id 1614ab0619ce182b3ef2a62068c9dbc406dc23b5f7739f8037be915faefe1e12 Dec 03 16:34:41 crc kubenswrapper[4768]: I1203 16:34:41.516133 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" event={"ID":"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3","Type":"ContainerStarted","Data":"1614ab0619ce182b3ef2a62068c9dbc406dc23b5f7739f8037be915faefe1e12"} Dec 03 16:34:41 crc kubenswrapper[4768]: I1203 16:34:41.517525 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" event={"ID":"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3","Type":"ContainerStarted","Data":"34789602acf98c811004f55243a81157ed13542ff567b936a7fe8fcc4b9392e2"} Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.564571 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" event={"ID":"6fb7770c-b85a-4bd3-9f49-dedffaeae0e3","Type":"ContainerStarted","Data":"23d94d022af3f4aa5a4cb3da460d63900c31196ff349e644a830c653b5ff9db5"} Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.565794 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.570445 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" event={"ID":"7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3","Type":"ContainerStarted","Data":"d798c81b9898191dbbef1ef253396adc038db47d671116e8ee4928860069e03d"} Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.570705 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.596864 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" podStartSLOduration=2.398183152 podStartE2EDuration="7.596841043s" podCreationTimestamp="2025-12-03 16:34:39 +0000 UTC" firstStartedPulling="2025-12-03 16:34:40.673528249 +0000 UTC m=+977.592864672" lastFinishedPulling="2025-12-03 16:34:45.87218615 +0000 UTC m=+982.791522563" observedRunningTime="2025-12-03 16:34:46.594350268 +0000 UTC m=+983.513686731" watchObservedRunningTime="2025-12-03 16:34:46.596841043 +0000 UTC m=+983.516177516" Dec 03 16:34:46 crc kubenswrapper[4768]: I1203 16:34:46.627723 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" podStartSLOduration=2.331206658 podStartE2EDuration="7.627706746s" podCreationTimestamp="2025-12-03 16:34:39 +0000 UTC" firstStartedPulling="2025-12-03 16:34:40.551621805 +0000 UTC m=+977.470958228" lastFinishedPulling="2025-12-03 16:34:45.848121893 +0000 UTC m=+982.767458316" observedRunningTime="2025-12-03 16:34:46.625139299 +0000 UTC m=+983.544475732" watchObservedRunningTime="2025-12-03 16:34:46.627706746 +0000 UTC m=+983.547043169" Dec 03 16:35:00 crc kubenswrapper[4768]: I1203 16:35:00.250630 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-98db5c7f-g87m7" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.003212 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-57f5c9498-vdmjc" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.829881 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.831186 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.835852 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-k7czv"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.838013 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.839705 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.839918 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.840937 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.851817 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.851821 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-2rqr5" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.908833 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-79sgk"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.910054 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-79sgk" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.913025 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-ljxks" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.913227 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.913370 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.913530 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.931860 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-sbbq7"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.933075 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.938456 4768 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.944799 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-sbbq7"] Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970290 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-metrics\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-conf\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970372 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tfhl\" (UniqueName: \"kubernetes.io/projected/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-kube-api-access-5tfhl\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970393 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9282b00-b418-4626-9620-8ca6252433b2-frr-startup\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970412 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-sockets\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970632 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/e9282b00-b418-4626-9620-8ca6252433b2-metrics-certs\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970708 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970735 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-reloader\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:20 crc kubenswrapper[4768]: I1203 16:35:20.970766 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz4lf\" (UniqueName: \"kubernetes.io/projected/e9282b00-b418-4626-9620-8ca6252433b2-kube-api-access-wz4lf\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.071770 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9282b00-b418-4626-9620-8ca6252433b2-metrics-certs\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072188 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-metrics-certs\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072282 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqtt9\" (UniqueName: \"kubernetes.io/projected/6c148908-2f51-41ee-adb8-bfd5cb821ebf-kube-api-access-wqtt9\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072369 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072457 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-reloader\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072535 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz4lf\" (UniqueName: \"kubernetes.io/projected/e9282b00-b418-4626-9620-8ca6252433b2-kube-api-access-wz4lf\") pod 
\"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072627 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-metrics\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072702 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-conf\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072776 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metrics-certs\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.072845 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-cert\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073271 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-metrics\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.073117 4768 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073126 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-conf\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073047 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-reloader\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.073653 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert podName:3eb4dbc3-d0f3-42bd-8d09-0af1ae304716 nodeName:}" failed. No retries permitted until 2025-12-03 16:35:21.573529596 +0000 UTC m=+1018.492866019 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert") pod "frr-k8s-webhook-server-7fcb986d4-nlk8z" (UID: "3eb4dbc3-d0f3-42bd-8d09-0af1ae304716") : secret "frr-k8s-webhook-server-cert" not found Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073758 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073847 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metallb-excludel2\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.073949 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tfhl\" (UniqueName: \"kubernetes.io/projected/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-kube-api-access-5tfhl\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.074300 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9282b00-b418-4626-9620-8ca6252433b2-frr-startup\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.075197 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk9wc\" (UniqueName: \"kubernetes.io/projected/8bb39058-0f85-42fe-884e-f7ea6e389a1e-kube-api-access-sk9wc\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.075320 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-sockets\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.075164 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e9282b00-b418-4626-9620-8ca6252433b2-frr-startup\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.075612 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e9282b00-b418-4626-9620-8ca6252433b2-frr-sockets\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.080404 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e9282b00-b418-4626-9620-8ca6252433b2-metrics-certs\") pod 
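The two MountVolume.SetUp failures above are ordering races, not misconfiguration: the webhook and speaker pods reference Secrets (frr-k8s-webhook-server-cert, metallb-memberlist) that the MetalLB operator has not created yet, and the kubelet parks the mount and retries. A minimal client-go sketch for checking whether the missing secret has appeared follows; the kubeconfig location and the use of a standalone program are assumptions, only the namespace and secret name come from the log.

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Build a client from the default kubeconfig (~/.kube/config); adjust as needed.
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// The kubelet reported: secret "frr-k8s-webhook-server-cert" not found.
	_, err = cs.CoreV1().Secrets("metallb-system").Get(context.TODO(),
		"frr-k8s-webhook-server-cert", metav1.GetOptions{})
	if err != nil {
		fmt.Println("still missing:", err) // expected until the operator creates it
		return
	}
	fmt.Println("secret present; the kubelet's next mount retry should succeed")
}

In this log the race resolves itself: the cert mount succeeds at 16:35:21.590649 and the memberlist mount at 16:35:22.710236 below, once the secrets exist.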
\"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.098159 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz4lf\" (UniqueName: \"kubernetes.io/projected/e9282b00-b418-4626-9620-8ca6252433b2-kube-api-access-wz4lf\") pod \"frr-k8s-k7czv\" (UID: \"e9282b00-b418-4626-9620-8ca6252433b2\") " pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.132973 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tfhl\" (UniqueName: \"kubernetes.io/projected/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-kube-api-access-5tfhl\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.167222 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.176816 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metrics-certs\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.178526 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-cert\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.179219 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metallb-excludel2\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.179399 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.179516 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk9wc\" (UniqueName: \"kubernetes.io/projected/8bb39058-0f85-42fe-884e-f7ea6e389a1e-kube-api-access-sk9wc\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.179715 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-metrics-certs\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.179837 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqtt9\" (UniqueName: 
\"kubernetes.io/projected/6c148908-2f51-41ee-adb8-bfd5cb821ebf-kube-api-access-wqtt9\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.180010 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metallb-excludel2\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.179534 4768 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.180107 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist podName:6c148908-2f51-41ee-adb8-bfd5cb821ebf nodeName:}" failed. No retries permitted until 2025-12-03 16:35:21.680088648 +0000 UTC m=+1018.599425181 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist") pod "speaker-79sgk" (UID: "6c148908-2f51-41ee-adb8-bfd5cb821ebf") : secret "metallb-memberlist" not found Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.181092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-cert\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.181271 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-metrics-certs\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.183651 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8bb39058-0f85-42fe-884e-f7ea6e389a1e-metrics-certs\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.199279 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk9wc\" (UniqueName: \"kubernetes.io/projected/8bb39058-0f85-42fe-884e-f7ea6e389a1e-kube-api-access-sk9wc\") pod \"controller-f8648f98b-sbbq7\" (UID: \"8bb39058-0f85-42fe-884e-f7ea6e389a1e\") " pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.199531 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqtt9\" (UniqueName: \"kubernetes.io/projected/6c148908-2f51-41ee-adb8-bfd5cb821ebf-kube-api-access-wqtt9\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.249109 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.585706 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.590649 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3eb4dbc3-d0f3-42bd-8d09-0af1ae304716-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nlk8z\" (UID: \"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.650141 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-sbbq7"] Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.687486 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.687679 4768 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 03 16:35:21 crc kubenswrapper[4768]: E1203 16:35:21.687759 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist podName:6c148908-2f51-41ee-adb8-bfd5cb821ebf nodeName:}" failed. No retries permitted until 2025-12-03 16:35:22.687742063 +0000 UTC m=+1019.607078486 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist") pod "speaker-79sgk" (UID: "6c148908-2f51-41ee-adb8-bfd5cb821ebf") : secret "metallb-memberlist" not found Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.754934 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.814514 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-sbbq7" event={"ID":"8bb39058-0f85-42fe-884e-f7ea6e389a1e","Type":"ContainerStarted","Data":"4ac59dacbd3fb8f40195f2b3caf925325644f60469191cfc45133db40fd1c664"} Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.814559 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-sbbq7" event={"ID":"8bb39058-0f85-42fe-884e-f7ea6e389a1e","Type":"ContainerStarted","Data":"6b59092b2ad181643e71ee1c90881a277c37ab6a76b2d7800d8dbe3749f16a0b"} Dec 03 16:35:21 crc kubenswrapper[4768]: I1203 16:35:21.815961 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"e707fcc166a26d04649e787cc0ff8c22d15841db851b13df58519ac12dc5bbf1"} Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.209587 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z"] Dec 03 16:35:22 crc kubenswrapper[4768]: W1203 16:35:22.214393 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3eb4dbc3_d0f3_42bd_8d09_0af1ae304716.slice/crio-a091fa12ecd2cb28189fa34d6a356d3d4527485e10ccd8672f0e6a99ab5afaa6 WatchSource:0}: Error finding container a091fa12ecd2cb28189fa34d6a356d3d4527485e10ccd8672f0e6a99ab5afaa6: Status 404 returned error can't find the container with id a091fa12ecd2cb28189fa34d6a356d3d4527485e10ccd8672f0e6a99ab5afaa6 Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.703754 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.710236 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/6c148908-2f51-41ee-adb8-bfd5cb821ebf-memberlist\") pod \"speaker-79sgk\" (UID: \"6c148908-2f51-41ee-adb8-bfd5cb821ebf\") " pod="metallb-system/speaker-79sgk" Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.727208 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-79sgk" Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.843532 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-79sgk" event={"ID":"6c148908-2f51-41ee-adb8-bfd5cb821ebf","Type":"ContainerStarted","Data":"e51213b5a9382eb589d06ad48a8f357a4bb01341cae07477843347088c92e2a7"} Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.845317 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" event={"ID":"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716","Type":"ContainerStarted","Data":"a091fa12ecd2cb28189fa34d6a356d3d4527485e10ccd8672f0e6a99ab5afaa6"} Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.847042 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-sbbq7" event={"ID":"8bb39058-0f85-42fe-884e-f7ea6e389a1e","Type":"ContainerStarted","Data":"4d9fe762ace18bf7f6a031309000d9b38b012befde7cfbe49356218ed308b3eb"} Dec 03 16:35:22 crc kubenswrapper[4768]: I1203 16:35:22.847230 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:23 crc kubenswrapper[4768]: I1203 16:35:23.573467 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-sbbq7" podStartSLOduration=3.573447868 podStartE2EDuration="3.573447868s" podCreationTimestamp="2025-12-03 16:35:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:35:22.871887416 +0000 UTC m=+1019.791223839" watchObservedRunningTime="2025-12-03 16:35:23.573447868 +0000 UTC m=+1020.492784291" Dec 03 16:35:23 crc kubenswrapper[4768]: I1203 16:35:23.855240 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-79sgk" event={"ID":"6c148908-2f51-41ee-adb8-bfd5cb821ebf","Type":"ContainerStarted","Data":"a36e5c69b3475996fe98a72ed920f0b51caed3ab966ddf56662b5c92294ce1b3"} Dec 03 16:35:23 crc kubenswrapper[4768]: I1203 16:35:23.855285 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-79sgk" event={"ID":"6c148908-2f51-41ee-adb8-bfd5cb821ebf","Type":"ContainerStarted","Data":"f815f3242fa3fb8e45849dd9db348185d028fa69567dd606fc5e1089c25440e2"} Dec 03 16:35:23 crc kubenswrapper[4768]: I1203 16:35:23.855699 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-79sgk" Dec 03 16:35:23 crc kubenswrapper[4768]: I1203 16:35:23.879309 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-79sgk" podStartSLOduration=3.879287289 podStartE2EDuration="3.879287289s" podCreationTimestamp="2025-12-03 16:35:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:35:23.875042389 +0000 UTC m=+1020.794378822" watchObservedRunningTime="2025-12-03 16:35:23.879287289 +0000 UTC m=+1020.798623722" Dec 03 16:35:29 crc kubenswrapper[4768]: I1203 16:35:29.914484 4768 generic.go:334] "Generic (PLEG): container finished" podID="e9282b00-b418-4626-9620-8ca6252433b2" containerID="f956478bf7ef0770f6e60d82a1bcd0b4a77e5359495a7c1ef38352d13c651f0c" exitCode=0 Dec 03 16:35:29 crc kubenswrapper[4768]: I1203 16:35:29.914781 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" 
event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerDied","Data":"f956478bf7ef0770f6e60d82a1bcd0b4a77e5359495a7c1ef38352d13c651f0c"} Dec 03 16:35:29 crc kubenswrapper[4768]: I1203 16:35:29.919473 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" event={"ID":"3eb4dbc3-d0f3-42bd-8d09-0af1ae304716","Type":"ContainerStarted","Data":"412844118b4c1a264c77ebe866ad5ee6dcc305fcc76d2b582ab35d46624da647"} Dec 03 16:35:29 crc kubenswrapper[4768]: I1203 16:35:29.919715 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:29 crc kubenswrapper[4768]: I1203 16:35:29.990800 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" podStartSLOduration=3.232515144 podStartE2EDuration="9.990779712s" podCreationTimestamp="2025-12-03 16:35:20 +0000 UTC" firstStartedPulling="2025-12-03 16:35:22.216409244 +0000 UTC m=+1019.135745687" lastFinishedPulling="2025-12-03 16:35:28.974673822 +0000 UTC m=+1025.894010255" observedRunningTime="2025-12-03 16:35:29.986247574 +0000 UTC m=+1026.905584007" watchObservedRunningTime="2025-12-03 16:35:29.990779712 +0000 UTC m=+1026.910116145" Dec 03 16:35:30 crc kubenswrapper[4768]: I1203 16:35:30.932078 4768 generic.go:334] "Generic (PLEG): container finished" podID="e9282b00-b418-4626-9620-8ca6252433b2" containerID="73ebf156b09fb7c04c522ad4466fa983c63011b42d5943229aab6d8c59741c2c" exitCode=0 Dec 03 16:35:30 crc kubenswrapper[4768]: I1203 16:35:30.932123 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerDied","Data":"73ebf156b09fb7c04c522ad4466fa983c63011b42d5943229aab6d8c59741c2c"} Dec 03 16:35:31 crc kubenswrapper[4768]: I1203 16:35:31.253160 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-sbbq7" Dec 03 16:35:31 crc kubenswrapper[4768]: I1203 16:35:31.942105 4768 generic.go:334] "Generic (PLEG): container finished" podID="e9282b00-b418-4626-9620-8ca6252433b2" containerID="b00363ce4c827f881d74462d82ecc9890b009ae5efb704527c67a1202845d368" exitCode=0 Dec 03 16:35:31 crc kubenswrapper[4768]: I1203 16:35:31.942169 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerDied","Data":"b00363ce4c827f881d74462d82ecc9890b009ae5efb704527c67a1202845d368"} Dec 03 16:35:32 crc kubenswrapper[4768]: I1203 16:35:32.957105 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"ace2b6ca43a38fd298d72e13c0c150f906e9b43e9f804df868595746165cbb7f"} Dec 03 16:35:32 crc kubenswrapper[4768]: I1203 16:35:32.957354 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"c449c9be30fdde537c6868abd2cf43f51009d22275ba111ee9a1343efdbbf8db"} Dec 03 16:35:32 crc kubenswrapper[4768]: I1203 16:35:32.957364 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"fd3d21c6e17b8d78a391970fdfb8afe45fb3390cc095089e8436eb85612e40bf"} Dec 03 16:35:32 crc 
kubenswrapper[4768]: I1203 16:35:32.957373 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"2198bf2e0feca676e748c53b313ac496855e9680d7c64f771045b3f79d7146b9"} Dec 03 16:35:32 crc kubenswrapper[4768]: I1203 16:35:32.957380 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"168a01d5d6e73d4a35c159fd8c3d3d33cb2c84e84179f9f7d4fd6bc50c9f0d0a"} Dec 03 16:35:33 crc kubenswrapper[4768]: I1203 16:35:33.967448 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-k7czv" event={"ID":"e9282b00-b418-4626-9620-8ca6252433b2","Type":"ContainerStarted","Data":"1ce045b790b48f9d938b8555fc4bc7d94a7894a160daa29704d4b4de988322a3"} Dec 03 16:35:33 crc kubenswrapper[4768]: I1203 16:35:33.967772 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:36 crc kubenswrapper[4768]: I1203 16:35:36.167850 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:36 crc kubenswrapper[4768]: I1203 16:35:36.232018 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:36 crc kubenswrapper[4768]: I1203 16:35:36.256364 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-k7czv" podStartSLOduration=8.598636334 podStartE2EDuration="16.256346695s" podCreationTimestamp="2025-12-03 16:35:20 +0000 UTC" firstStartedPulling="2025-12-03 16:35:21.311077798 +0000 UTC m=+1018.230414221" lastFinishedPulling="2025-12-03 16:35:28.968788119 +0000 UTC m=+1025.888124582" observedRunningTime="2025-12-03 16:35:33.992187149 +0000 UTC m=+1030.911523592" watchObservedRunningTime="2025-12-03 16:35:36.256346695 +0000 UTC m=+1033.175683118" Dec 03 16:35:41 crc kubenswrapper[4768]: I1203 16:35:41.758838 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nlk8z" Dec 03 16:35:42 crc kubenswrapper[4768]: I1203 16:35:42.731551 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-79sgk" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.188055 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.189205 4768 util.go:30] "No sandbox for pod can be found. 
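The pod_startup_latency_tracker line for frr-k8s-k7czv above relates its own numbers: the SLO duration (8.598636334s) is the end-to-end startup (16.256346695s) minus the image-pull window, which the monotonic m=+... offsets pin down exactly. A minimal check of that arithmetic, using only values copied from the log entry:

package main

import "fmt"

// Reproduce podStartSLOduration for metallb-system/frr-k8s-k7czv from the
// monotonic offsets logged above: SLO = E2E - (lastFinishedPulling - firstStartedPulling).
func main() {
	e2e := 16.256346695                     // podStartE2EDuration, seconds
	pull := 1025.888124582 - 1018.230414221 // m= offsets of the pull window
	fmt.Printf("pull window:  %.9fs\n", pull)      // 7.657710361s
	fmt.Printf("SLO duration: %.9fs\n", e2e-pull) // 8.598636334s, as logged
}

The same relation holds for the webhook-server entry above (9.990779712s - 6.758264568s = 3.232515144s); for controller and speaker the pull timestamps are the zero time, so SLO and E2E coincide.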
Need to start a new one" pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.191743 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-2hl2p" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.192305 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.192763 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.195535 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.346444 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh2js\" (UniqueName: \"kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js\") pod \"openstack-operator-index-2kjsm\" (UID: \"167f5b31-aff2-4336-9a3a-f264d35f1fff\") " pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.447792 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh2js\" (UniqueName: \"kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js\") pod \"openstack-operator-index-2kjsm\" (UID: \"167f5b31-aff2-4336-9a3a-f264d35f1fff\") " pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.468882 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh2js\" (UniqueName: \"kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js\") pod \"openstack-operator-index-2kjsm\" (UID: \"167f5b31-aff2-4336-9a3a-f264d35f1fff\") " pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.509857 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:46 crc kubenswrapper[4768]: I1203 16:35:46.912579 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:47 crc kubenswrapper[4768]: I1203 16:35:47.064699 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2kjsm" event={"ID":"167f5b31-aff2-4336-9a3a-f264d35f1fff","Type":"ContainerStarted","Data":"03b559bdba42dc35dffd2d0f71850c1da15970f71d45c0e4e8b3895532bddf3f"} Dec 03 16:35:50 crc kubenswrapper[4768]: I1203 16:35:50.578243 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.092483 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2kjsm" event={"ID":"167f5b31-aff2-4336-9a3a-f264d35f1fff","Type":"ContainerStarted","Data":"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf"} Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.092571 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-2kjsm" podUID="167f5b31-aff2-4336-9a3a-f264d35f1fff" containerName="registry-server" containerID="cri-o://c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf" gracePeriod=2 Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.111124 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2kjsm" podStartSLOduration=1.331460031 podStartE2EDuration="5.111099856s" podCreationTimestamp="2025-12-03 16:35:46 +0000 UTC" firstStartedPulling="2025-12-03 16:35:46.920109094 +0000 UTC m=+1043.839445517" lastFinishedPulling="2025-12-03 16:35:50.699748919 +0000 UTC m=+1047.619085342" observedRunningTime="2025-12-03 16:35:51.105526891 +0000 UTC m=+1048.024863344" watchObservedRunningTime="2025-12-03 16:35:51.111099856 +0000 UTC m=+1048.030436299" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.171668 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-k7czv" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.185690 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cqdlr"] Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.186651 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.195998 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cqdlr"] Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.280761 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxvfq\" (UniqueName: \"kubernetes.io/projected/f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f-kube-api-access-bxvfq\") pod \"openstack-operator-index-cqdlr\" (UID: \"f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f\") " pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.389437 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxvfq\" (UniqueName: \"kubernetes.io/projected/f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f-kube-api-access-bxvfq\") pod \"openstack-operator-index-cqdlr\" (UID: \"f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f\") " pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.419570 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxvfq\" (UniqueName: \"kubernetes.io/projected/f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f-kube-api-access-bxvfq\") pod \"openstack-operator-index-cqdlr\" (UID: \"f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f\") " pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.538769 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.549061 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.693429 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh2js\" (UniqueName: \"kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js\") pod \"167f5b31-aff2-4336-9a3a-f264d35f1fff\" (UID: \"167f5b31-aff2-4336-9a3a-f264d35f1fff\") " Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.698956 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js" (OuterVolumeSpecName: "kube-api-access-lh2js") pod "167f5b31-aff2-4336-9a3a-f264d35f1fff" (UID: "167f5b31-aff2-4336-9a3a-f264d35f1fff"). InnerVolumeSpecName "kube-api-access-lh2js". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.795735 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh2js\" (UniqueName: \"kubernetes.io/projected/167f5b31-aff2-4336-9a3a-f264d35f1fff-kube-api-access-lh2js\") on node \"crc\" DevicePath \"\"" Dec 03 16:35:51 crc kubenswrapper[4768]: I1203 16:35:51.885418 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cqdlr"] Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.100827 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cqdlr" event={"ID":"f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f","Type":"ContainerStarted","Data":"18fd5bb361d3fb92df1ebfc1ed1c2e4ba794c068977ab686ed0af8bf5f1a91b3"} Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.102544 4768 generic.go:334] "Generic (PLEG): container finished" podID="167f5b31-aff2-4336-9a3a-f264d35f1fff" containerID="c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf" exitCode=0 Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.102631 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2kjsm" event={"ID":"167f5b31-aff2-4336-9a3a-f264d35f1fff","Type":"ContainerDied","Data":"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf"} Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.102650 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2kjsm" event={"ID":"167f5b31-aff2-4336-9a3a-f264d35f1fff","Type":"ContainerDied","Data":"03b559bdba42dc35dffd2d0f71850c1da15970f71d45c0e4e8b3895532bddf3f"} Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.102669 4768 scope.go:117] "RemoveContainer" containerID="c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf" Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.102792 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-2kjsm" Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.126368 4768 scope.go:117] "RemoveContainer" containerID="c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf" Dec 03 16:35:52 crc kubenswrapper[4768]: E1203 16:35:52.127458 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf\": container with ID starting with c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf not found: ID does not exist" containerID="c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf" Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.127494 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf"} err="failed to get container status \"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf\": rpc error: code = NotFound desc = could not find container \"c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf\": container with ID starting with c7e48b0354a456a6a62d7408470a9b1aa17d4cc5121f78be0bae0c9179dbb8cf not found: ID does not exist" Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.132846 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:52 crc kubenswrapper[4768]: I1203 16:35:52.134552 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-2kjsm"] Dec 03 16:35:53 crc kubenswrapper[4768]: I1203 16:35:53.108831 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cqdlr" event={"ID":"f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f","Type":"ContainerStarted","Data":"e2ad85c89ed1b9d502021b9c642306e48ee3225415aa684ec11d05e003d78d89"} Dec 03 16:35:53 crc kubenswrapper[4768]: I1203 16:35:53.129023 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cqdlr" podStartSLOduration=2.080007368 podStartE2EDuration="2.129001873s" podCreationTimestamp="2025-12-03 16:35:51 +0000 UTC" firstStartedPulling="2025-12-03 16:35:51.901386878 +0000 UTC m=+1048.820723311" lastFinishedPulling="2025-12-03 16:35:51.950381383 +0000 UTC m=+1048.869717816" observedRunningTime="2025-12-03 16:35:53.123667324 +0000 UTC m=+1050.043003767" watchObservedRunningTime="2025-12-03 16:35:53.129001873 +0000 UTC m=+1050.048338296" Dec 03 16:35:53 crc kubenswrapper[4768]: I1203 16:35:53.581897 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="167f5b31-aff2-4336-9a3a-f264d35f1fff" path="/var/lib/kubelet/pods/167f5b31-aff2-4336-9a3a-f264d35f1fff/volumes" Dec 03 16:36:01 crc kubenswrapper[4768]: I1203 16:36:01.550478 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:36:01 crc kubenswrapper[4768]: I1203 16:36:01.550818 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:36:01 crc kubenswrapper[4768]: I1203 16:36:01.576442 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:36:02 crc kubenswrapper[4768]: I1203 16:36:02.194043 4768 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-cqdlr" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.930203 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b"] Dec 03 16:36:07 crc kubenswrapper[4768]: E1203 16:36:07.931207 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="167f5b31-aff2-4336-9a3a-f264d35f1fff" containerName="registry-server" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.931268 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="167f5b31-aff2-4336-9a3a-f264d35f1fff" containerName="registry-server" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.932064 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="167f5b31-aff2-4336-9a3a-f264d35f1fff" containerName="registry-server" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.934479 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.936700 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-k9pcw" Dec 03 16:36:07 crc kubenswrapper[4768]: I1203 16:36:07.936841 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b"] Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.020684 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.020746 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.020806 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cnpn\" (UniqueName: \"kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.122353 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cnpn\" (UniqueName: \"kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.122446 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.122486 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.123117 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.123510 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.150568 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cnpn\" (UniqueName: \"kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn\") pod \"fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.259914 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:08 crc kubenswrapper[4768]: I1203 16:36:08.706842 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b"] Dec 03 16:36:09 crc kubenswrapper[4768]: I1203 16:36:09.221331 4768 generic.go:334] "Generic (PLEG): container finished" podID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerID="a7bbdabb7a125d25d33139aca33bf3db99280647eb7b090f35c1fec73073f9fb" exitCode=0 Dec 03 16:36:09 crc kubenswrapper[4768]: I1203 16:36:09.221397 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" event={"ID":"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb","Type":"ContainerDied","Data":"a7bbdabb7a125d25d33139aca33bf3db99280647eb7b090f35c1fec73073f9fb"} Dec 03 16:36:09 crc kubenswrapper[4768]: I1203 16:36:09.221702 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" event={"ID":"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb","Type":"ContainerStarted","Data":"b66694b3a8eaa7caa34545ea93b0e6e73fd4cf8b0c7b147abe65940331ae4466"} Dec 03 16:36:09 crc kubenswrapper[4768]: I1203 16:36:09.223541 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:36:10 crc kubenswrapper[4768]: I1203 16:36:10.230777 4768 generic.go:334] "Generic (PLEG): container finished" podID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerID="a698d8b56274f6e18bdf0c37e8a870178cead869ddbd90af1b5a1193920cb674" exitCode=0 Dec 03 16:36:10 crc kubenswrapper[4768]: I1203 16:36:10.231066 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" event={"ID":"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb","Type":"ContainerDied","Data":"a698d8b56274f6e18bdf0c37e8a870178cead869ddbd90af1b5a1193920cb674"} Dec 03 16:36:11 crc kubenswrapper[4768]: I1203 16:36:11.239283 4768 generic.go:334] "Generic (PLEG): container finished" podID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerID="89371673dd7d85d4f05413385a7d2b0982283294a66d6923767fd34ce56cd878" exitCode=0 Dec 03 16:36:11 crc kubenswrapper[4768]: I1203 16:36:11.239325 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" event={"ID":"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb","Type":"ContainerDied","Data":"89371673dd7d85d4f05413385a7d2b0982283294a66d6923767fd34ce56cd878"} Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.615636 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.693076 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cnpn\" (UniqueName: \"kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn\") pod \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.693160 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle\") pod \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.693236 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util\") pod \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\" (UID: \"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb\") " Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.695538 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle" (OuterVolumeSpecName: "bundle") pod "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" (UID: "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.700421 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn" (OuterVolumeSpecName: "kube-api-access-6cnpn") pod "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" (UID: "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb"). InnerVolumeSpecName "kube-api-access-6cnpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.712107 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util" (OuterVolumeSpecName: "util") pod "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" (UID: "6c41b0af-ccc3-49e5-a009-0ab5ea153ebb"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.794482 4768 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-util\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.794564 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cnpn\" (UniqueName: \"kubernetes.io/projected/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-kube-api-access-6cnpn\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:12 crc kubenswrapper[4768]: I1203 16:36:12.794585 4768 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6c41b0af-ccc3-49e5-a009-0ab5ea153ebb-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:36:13 crc kubenswrapper[4768]: I1203 16:36:13.257643 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" event={"ID":"6c41b0af-ccc3-49e5-a009-0ab5ea153ebb","Type":"ContainerDied","Data":"b66694b3a8eaa7caa34545ea93b0e6e73fd4cf8b0c7b147abe65940331ae4466"} Dec 03 16:36:13 crc kubenswrapper[4768]: I1203 16:36:13.257988 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b66694b3a8eaa7caa34545ea93b0e6e73fd4cf8b0c7b147abe65940331ae4466" Dec 03 16:36:13 crc kubenswrapper[4768]: I1203 16:36:13.257751 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.154775 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc"] Dec 03 16:36:20 crc kubenswrapper[4768]: E1203 16:36:20.155391 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="pull" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.155404 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="pull" Dec 03 16:36:20 crc kubenswrapper[4768]: E1203 16:36:20.155415 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="extract" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.155420 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="extract" Dec 03 16:36:20 crc kubenswrapper[4768]: E1203 16:36:20.155438 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="util" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.155444 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="util" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.155556 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c41b0af-ccc3-49e5-a009-0ab5ea153ebb" containerName="extract" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.155944 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.158188 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-4jx54" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.184256 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc"] Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.291620 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t99dx\" (UniqueName: \"kubernetes.io/projected/a616a1fc-015c-4f96-ab87-cb3fe397e123-kube-api-access-t99dx\") pod \"openstack-operator-controller-operator-65d54995fc-qt7jc\" (UID: \"a616a1fc-015c-4f96-ab87-cb3fe397e123\") " pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.393002 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t99dx\" (UniqueName: \"kubernetes.io/projected/a616a1fc-015c-4f96-ab87-cb3fe397e123-kube-api-access-t99dx\") pod \"openstack-operator-controller-operator-65d54995fc-qt7jc\" (UID: \"a616a1fc-015c-4f96-ab87-cb3fe397e123\") " pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.413455 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t99dx\" (UniqueName: \"kubernetes.io/projected/a616a1fc-015c-4f96-ab87-cb3fe397e123-kube-api-access-t99dx\") pod \"openstack-operator-controller-operator-65d54995fc-qt7jc\" (UID: \"a616a1fc-015c-4f96-ab87-cb3fe397e123\") " pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.473638 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:20 crc kubenswrapper[4768]: I1203 16:36:20.885356 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc"] Dec 03 16:36:21 crc kubenswrapper[4768]: I1203 16:36:21.670697 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" event={"ID":"a616a1fc-015c-4f96-ab87-cb3fe397e123","Type":"ContainerStarted","Data":"2d4629221e013bda0a392b1bcc4387fa9a566142265a0b3ce5bb0e152f1ab071"} Dec 03 16:36:25 crc kubenswrapper[4768]: I1203 16:36:25.705875 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" event={"ID":"a616a1fc-015c-4f96-ab87-cb3fe397e123","Type":"ContainerStarted","Data":"95e00136379c6647c4c162f35201a5886781a48c86dc6d360600843c3439d8d3"} Dec 03 16:36:25 crc kubenswrapper[4768]: I1203 16:36:25.706278 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:25 crc kubenswrapper[4768]: I1203 16:36:25.759728 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" podStartSLOduration=1.887802997 podStartE2EDuration="5.759704563s" podCreationTimestamp="2025-12-03 16:36:20 +0000 UTC" firstStartedPulling="2025-12-03 16:36:20.893978589 +0000 UTC m=+1077.813315012" lastFinishedPulling="2025-12-03 16:36:24.765880155 +0000 UTC m=+1081.685216578" observedRunningTime="2025-12-03 16:36:25.75190695 +0000 UTC m=+1082.671243413" watchObservedRunningTime="2025-12-03 16:36:25.759704563 +0000 UTC m=+1082.679041016" Dec 03 16:36:26 crc kubenswrapper[4768]: I1203 16:36:26.027855 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:36:26 crc kubenswrapper[4768]: I1203 16:36:26.028213 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:36:30 crc kubenswrapper[4768]: I1203 16:36:30.477672 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-65d54995fc-qt7jc" Dec 03 16:36:56 crc kubenswrapper[4768]: I1203 16:36:56.028797 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:36:56 crc kubenswrapper[4768]: I1203 16:36:56.029389 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
Dec 03 16:36:56 crc kubenswrapper[4768]: I1203 16:36:56.029389 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.533498 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.535088 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.540297 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-2dc2t"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.546784 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.548032 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.553546 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-wkbvv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.574034 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.577728 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.578774 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.580730 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-5dfjn"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.599142 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.606283 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.612380 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.614914 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.619248 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-n6wlb"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.629551 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.630523 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.632032 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-p6c2m"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.668395 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.674771 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.708825 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.710021 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x59zz\" (UniqueName: \"kubernetes.io/projected/ff2d8ce7-0093-406f-982e-dac8b2b62593-kube-api-access-x59zz\") pod \"designate-operator-controller-manager-78b4bc895b-hg9tx\" (UID: \"ff2d8ce7-0093-406f-982e-dac8b2b62593\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.710061 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpf76\" (UniqueName: \"kubernetes.io/projected/0eb6c4a6-a68d-4d28-9b09-64a3dd981978-kube-api-access-lpf76\") pod \"cinder-operator-controller-manager-859b6ccc6-jgz9x\" (UID: \"0eb6c4a6-a68d-4d28-9b09-64a3dd981978\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.710085 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ls5d\" (UniqueName: \"kubernetes.io/projected/8cb78567-ca7b-4a8b-9f94-b503727cf509-kube-api-access-9ls5d\") pod \"barbican-operator-controller-manager-7d9dfd778-g5nnn\" (UID: \"8cb78567-ca7b-4a8b-9f94-b503727cf509\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.717705 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-jp6vz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.727922 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.746851 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.747985 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.750513 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.750672 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-7hkx8"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.768547 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.769764 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.774497 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-l7plq"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.799400 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811176 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gfw7r\" (UniqueName: \"kubernetes.io/projected/ac20b433-8d19-4ffc-a3d8-001ab7660cfb-kube-api-access-gfw7r\") pod \"glance-operator-controller-manager-77987cd8cd-5mfx6\" (UID: \"ac20b433-8d19-4ffc-a3d8-001ab7660cfb\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811234 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x59zz\" (UniqueName: \"kubernetes.io/projected/ff2d8ce7-0093-406f-982e-dac8b2b62593-kube-api-access-x59zz\") pod \"designate-operator-controller-manager-78b4bc895b-hg9tx\" (UID: \"ff2d8ce7-0093-406f-982e-dac8b2b62593\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811270 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpf76\" (UniqueName: \"kubernetes.io/projected/0eb6c4a6-a68d-4d28-9b09-64a3dd981978-kube-api-access-lpf76\") pod \"cinder-operator-controller-manager-859b6ccc6-jgz9x\" (UID: \"0eb6c4a6-a68d-4d28-9b09-64a3dd981978\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811296 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk27v\" (UniqueName: \"kubernetes.io/projected/f91ea1ca-d4a3-47c9-a5a8-38a78224668a-kube-api-access-zk27v\") pod \"horizon-operator-controller-manager-68c6d99b8f-m8lmv\" (UID: \"f91ea1ca-d4a3-47c9-a5a8-38a78224668a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811318 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ls5d\" (UniqueName: \"kubernetes.io/projected/8cb78567-ca7b-4a8b-9f94-b503727cf509-kube-api-access-9ls5d\") pod \"barbican-operator-controller-manager-7d9dfd778-g5nnn\" (UID: \"8cb78567-ca7b-4a8b-9f94-b503727cf509\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.811336 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kklzn\" (UniqueName: \"kubernetes.io/projected/085d4818-0975-441d-87fc-8c22aa78d86f-kube-api-access-kklzn\") pod \"heat-operator-controller-manager-5f64f6f8bb-m97mz\" (UID: \"085d4818-0975-441d-87fc-8c22aa78d86f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.816773 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.817777 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.825685 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.836297 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.837623 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.838008 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.841637 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-xjtgq"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.841854 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-w96dd"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.846483 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x59zz\" (UniqueName: \"kubernetes.io/projected/ff2d8ce7-0093-406f-982e-dac8b2b62593-kube-api-access-x59zz\") pod \"designate-operator-controller-manager-78b4bc895b-hg9tx\" (UID: \"ff2d8ce7-0093-406f-982e-dac8b2b62593\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.855708 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.886104 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.886579 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ls5d\" (UniqueName: \"kubernetes.io/projected/8cb78567-ca7b-4a8b-9f94-b503727cf509-kube-api-access-9ls5d\") pod \"barbican-operator-controller-manager-7d9dfd778-g5nnn\" (UID: \"8cb78567-ca7b-4a8b-9f94-b503727cf509\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.886579 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpf76\" (UniqueName: \"kubernetes.io/projected/0eb6c4a6-a68d-4d28-9b09-64a3dd981978-kube-api-access-lpf76\") pod \"cinder-operator-controller-manager-859b6ccc6-jgz9x\" (UID: \"0eb6c4a6-a68d-4d28-9b09-64a3dd981978\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.894433 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.896803 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.897999 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.902117 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-chnsw"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.903824 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.906820 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.909160 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.913842 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l9dd\" (UniqueName: \"kubernetes.io/projected/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-kube-api-access-2l9dd\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.913893 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk27v\" (UniqueName: \"kubernetes.io/projected/f91ea1ca-d4a3-47c9-a5a8-38a78224668a-kube-api-access-zk27v\") pod \"horizon-operator-controller-manager-68c6d99b8f-m8lmv\" (UID: \"f91ea1ca-d4a3-47c9-a5a8-38a78224668a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.913915 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kklzn\" (UniqueName: \"kubernetes.io/projected/085d4818-0975-441d-87fc-8c22aa78d86f-kube-api-access-kklzn\") pod \"heat-operator-controller-manager-5f64f6f8bb-m97mz\" (UID: \"085d4818-0975-441d-87fc-8c22aa78d86f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.913935 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.913959 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msbqx\" (UniqueName: \"kubernetes.io/projected/ad48f666-a22a-4d97-9736-5f284268bd4a-kube-api-access-msbqx\") pod \"ironic-operator-controller-manager-6c548fd776-9q857\" (UID: \"ad48f666-a22a-4d97-9736-5f284268bd4a\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.914009 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gfw7r\" (UniqueName: \"kubernetes.io/projected/ac20b433-8d19-4ffc-a3d8-001ab7660cfb-kube-api-access-gfw7r\") pod \"glance-operator-controller-manager-77987cd8cd-5mfx6\" (UID: \"ac20b433-8d19-4ffc-a3d8-001ab7660cfb\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.914705 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-2rrpp"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.919127 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.920754 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.923698 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ktz8m"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.938104 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gfw7r\" (UniqueName: \"kubernetes.io/projected/ac20b433-8d19-4ffc-a3d8-001ab7660cfb-kube-api-access-gfw7r\") pod \"glance-operator-controller-manager-77987cd8cd-5mfx6\" (UID: \"ac20b433-8d19-4ffc-a3d8-001ab7660cfb\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.945011 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk27v\" (UniqueName: \"kubernetes.io/projected/f91ea1ca-d4a3-47c9-a5a8-38a78224668a-kube-api-access-zk27v\") pod \"horizon-operator-controller-manager-68c6d99b8f-m8lmv\" (UID: \"f91ea1ca-d4a3-47c9-a5a8-38a78224668a\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.945222 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kklzn\" (UniqueName: \"kubernetes.io/projected/085d4818-0975-441d-87fc-8c22aa78d86f-kube-api-access-kklzn\") pod \"heat-operator-controller-manager-5f64f6f8bb-m97mz\" (UID: \"085d4818-0975-441d-87fc-8c22aa78d86f\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.948718 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.952415 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.960500 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.969772 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.970713 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.970941 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.974062 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-hsr46"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.988689 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.989808 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.991689 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-bzhlb"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.995900 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp"]
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.997583 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.999188 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tlkvt"
Dec 03 16:37:08 crc kubenswrapper[4768]: I1203 16:37:08.999418 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.002369 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015159 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzsfv\" (UniqueName: \"kubernetes.io/projected/dc9eedd8-2956-447b-9a21-7b71bcb0c8c4-kube-api-access-dzsfv\") pod \"keystone-operator-controller-manager-7765d96ddf-7pfgm\" (UID: \"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015226 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l9dd\" (UniqueName: \"kubernetes.io/projected/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-kube-api-access-2l9dd\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015249 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqnqq\" (UniqueName: \"kubernetes.io/projected/7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa-kube-api-access-cqnqq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-s6lpx\" (UID: \"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015285 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjz24\" (UniqueName: \"kubernetes.io/projected/34791f4b-32bc-44e5-90ca-ec286f96fe15-kube-api-access-gjz24\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9n7sr\" (UID: \"34791f4b-32bc-44e5-90ca-ec286f96fe15\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015313 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015340 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msbqx\" (UniqueName: \"kubernetes.io/projected/ad48f666-a22a-4d97-9736-5f284268bd4a-kube-api-access-msbqx\") pod \"ironic-operator-controller-manager-6c548fd776-9q857\" (UID: \"ad48f666-a22a-4d97-9736-5f284268bd4a\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.015371 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htnz5\" (UniqueName: \"kubernetes.io/projected/a9dcaa43-ad02-45aa-a320-dd9d2c609bf4-kube-api-access-htnz5\") pod \"manila-operator-controller-manager-7c79b5df47-zv6pv\" (UID: \"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.015489 4768 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.015541 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert podName:3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:09.515522655 +0000 UTC m=+1126.434859078 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert") pod "infra-operator-controller-manager-57548d458d-vj7sm" (UID: "3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1") : secret "infra-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.019513 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.021526 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.036194 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.040436 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l9dd\" (UniqueName: \"kubernetes.io/projected/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-kube-api-access-2l9dd\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.042685 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.042748 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"
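The two E-level entries above are the first failure/retry cycle for the infra-operator webhook certificate: the Secret does not exist yet, so MountVolume.SetUp fails and the volume manager schedules a retry (durationBeforeRetry 500ms here; the same volume is retried at 1s further down). A small client-go sketch, with a hypothetical waitForSecret helper, of polling for the Secret the way an external check might; names are taken from the log:

    package main

    import (
        "context"
        "fmt"
        "time"

        apierrors "k8s.io/apimachinery/pkg/api/errors"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/apimachinery/pkg/util/wait"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    // waitForSecret polls until the named Secret exists, roughly mirroring
    // what kubelet's retry loop achieves for the "cert" volume above.
    func waitForSecret(ctx context.Context, cs kubernetes.Interface, ns, name string) error {
        return wait.PollUntilContextTimeout(ctx, 500*time.Millisecond, 2*time.Minute, true,
            func(ctx context.Context) (bool, error) {
                _, err := cs.CoreV1().Secrets(ns).Get(ctx, name, metav1.GetOptions{})
                if apierrors.IsNotFound(err) {
                    return false, nil // not created yet; poll again
                }
                return err == nil, err
            })
    }

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs := kubernetes.NewForConfigOrDie(cfg)
        if err := waitForSecret(context.Background(), cs, "openstack-operators",
            "infra-operator-webhook-server-cert"); err != nil {
            panic(err)
        }
        fmt.Println("secret exists; kubelet's next mount retry should succeed")
    }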
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.048173 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msbqx\" (UniqueName: \"kubernetes.io/projected/ad48f666-a22a-4d97-9736-5f284268bd4a-kube-api-access-msbqx\") pod \"ironic-operator-controller-manager-6c548fd776-9q857\" (UID: \"ad48f666-a22a-4d97-9736-5f284268bd4a\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.051103 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-7knrm" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.051533 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-5xfvp" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.081109 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.093142 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.111882 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.121933 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htnz5\" (UniqueName: \"kubernetes.io/projected/a9dcaa43-ad02-45aa-a320-dd9d2c609bf4-kube-api-access-htnz5\") pod \"manila-operator-controller-manager-7c79b5df47-zv6pv\" (UID: \"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.121996 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtwrl\" (UniqueName: \"kubernetes.io/projected/29a3455b-b1d4-496e-936b-348846b289e0-kube-api-access-mtwrl\") pod \"nova-operator-controller-manager-697bc559fc-bfpmf\" (UID: \"29a3455b-b1d4-496e-936b-348846b289e0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122031 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g58fx\" (UniqueName: \"kubernetes.io/projected/bec968a9-b8ec-48f3-9625-96ce1f1e2dda-kube-api-access-g58fx\") pod \"octavia-operator-controller-manager-998648c74-cnc7h\" (UID: \"bec968a9-b8ec-48f3-9625-96ce1f1e2dda\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122084 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzsfv\" (UniqueName: \"kubernetes.io/projected/dc9eedd8-2956-447b-9a21-7b71bcb0c8c4-kube-api-access-dzsfv\") pod \"keystone-operator-controller-manager-7765d96ddf-7pfgm\" (UID: \"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122113 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv7gm\" (UniqueName: \"kubernetes.io/projected/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-kube-api-access-bv7gm\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122161 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqnqq\" (UniqueName: \"kubernetes.io/projected/7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa-kube-api-access-cqnqq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-s6lpx\" (UID: \"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122208 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86mgr\" (UniqueName: \"kubernetes.io/projected/f6440acf-55b8-48fb-b212-550dcc9e9600-kube-api-access-86mgr\") pod \"ovn-operator-controller-manager-b6456fdb6-jwxs2\" (UID: \"f6440acf-55b8-48fb-b212-550dcc9e9600\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122230 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjz24\" (UniqueName: \"kubernetes.io/projected/34791f4b-32bc-44e5-90ca-ec286f96fe15-kube-api-access-gjz24\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9n7sr\" (UID: \"34791f4b-32bc-44e5-90ca-ec286f96fe15\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.122265 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.155471 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqnqq\" (UniqueName: \"kubernetes.io/projected/7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa-kube-api-access-cqnqq\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-s6lpx\" (UID: \"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.156821 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.158426 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzsfv\" (UniqueName: \"kubernetes.io/projected/dc9eedd8-2956-447b-9a21-7b71bcb0c8c4-kube-api-access-dzsfv\") pod \"keystone-operator-controller-manager-7765d96ddf-7pfgm\" (UID: \"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.160494 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjz24\" (UniqueName: \"kubernetes.io/projected/34791f4b-32bc-44e5-90ca-ec286f96fe15-kube-api-access-gjz24\") pod \"mariadb-operator-controller-manager-56bbcc9d85-9n7sr\" (UID: \"34791f4b-32bc-44e5-90ca-ec286f96fe15\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.164238 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htnz5\" (UniqueName: \"kubernetes.io/projected/a9dcaa43-ad02-45aa-a320-dd9d2c609bf4-kube-api-access-htnz5\") pod \"manila-operator-controller-manager-7c79b5df47-zv6pv\" (UID: \"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.177643 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.202641 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.203905 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.207240 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-jmhn2" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.216647 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.233526 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236315 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lrvr\" (UniqueName: \"kubernetes.io/projected/347b8067-6147-477e-b00b-a5a60a29b7d8-kube-api-access-7lrvr\") pod \"swift-operator-controller-manager-5f8c65bbfc-n2qpd\" (UID: \"347b8067-6147-477e-b00b-a5a60a29b7d8\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236357 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtwrl\" (UniqueName: \"kubernetes.io/projected/29a3455b-b1d4-496e-936b-348846b289e0-kube-api-access-mtwrl\") pod \"nova-operator-controller-manager-697bc559fc-bfpmf\" (UID: \"29a3455b-b1d4-496e-936b-348846b289e0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236385 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g58fx\" (UniqueName: \"kubernetes.io/projected/bec968a9-b8ec-48f3-9625-96ce1f1e2dda-kube-api-access-g58fx\") pod \"octavia-operator-controller-manager-998648c74-cnc7h\" (UID: \"bec968a9-b8ec-48f3-9625-96ce1f1e2dda\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236418 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv7gm\" (UniqueName: \"kubernetes.io/projected/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-kube-api-access-bv7gm\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236447 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5szhk\" (UniqueName: \"kubernetes.io/projected/982d5154-f537-4205-b268-3ce9aa7bdc37-kube-api-access-5szhk\") pod \"placement-operator-controller-manager-78f8948974-qt8cv\" (UID: \"982d5154-f537-4205-b268-3ce9aa7bdc37\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236478 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86mgr\" (UniqueName: \"kubernetes.io/projected/f6440acf-55b8-48fb-b212-550dcc9e9600-kube-api-access-86mgr\") pod \"ovn-operator-controller-manager-b6456fdb6-jwxs2\" (UID: \"f6440acf-55b8-48fb-b212-550dcc9e9600\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.236505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.247022 4768 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret 
"openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.247104 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert podName:bae1e6ba-54bf-411a-a2b9-b79b8ff85210 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:09.747086143 +0000 UTC m=+1126.666422566 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" (UID: "bae1e6ba-54bf-411a-a2b9-b79b8ff85210") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.262470 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g58fx\" (UniqueName: \"kubernetes.io/projected/bec968a9-b8ec-48f3-9625-96ce1f1e2dda-kube-api-access-g58fx\") pod \"octavia-operator-controller-manager-998648c74-cnc7h\" (UID: \"bec968a9-b8ec-48f3-9625-96ce1f1e2dda\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.262746 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv7gm\" (UniqueName: \"kubernetes.io/projected/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-kube-api-access-bv7gm\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.267145 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86mgr\" (UniqueName: \"kubernetes.io/projected/f6440acf-55b8-48fb-b212-550dcc9e9600-kube-api-access-86mgr\") pod \"ovn-operator-controller-manager-b6456fdb6-jwxs2\" (UID: \"f6440acf-55b8-48fb-b212-550dcc9e9600\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.267880 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtwrl\" (UniqueName: \"kubernetes.io/projected/29a3455b-b1d4-496e-936b-348846b289e0-kube-api-access-mtwrl\") pod \"nova-operator-controller-manager-697bc559fc-bfpmf\" (UID: \"29a3455b-b1d4-496e-936b-348846b289e0\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.268310 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.275073 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.276406 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.283595 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-c6p5c" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.286817 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.289189 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.308250 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.310924 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.312163 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.319071 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-plrl9" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.319088 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.328748 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.336446 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.337311 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5szhk\" (UniqueName: \"kubernetes.io/projected/982d5154-f537-4205-b268-3ce9aa7bdc37-kube-api-access-5szhk\") pod \"placement-operator-controller-manager-78f8948974-qt8cv\" (UID: \"982d5154-f537-4205-b268-3ce9aa7bdc37\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.337390 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7b7r\" (UniqueName: \"kubernetes.io/projected/f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5-kube-api-access-z7b7r\") pod \"telemetry-operator-controller-manager-5b6647b8f8-ztfkl\" (UID: \"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5\") " pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.337431 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lrvr\" (UniqueName: \"kubernetes.io/projected/347b8067-6147-477e-b00b-a5a60a29b7d8-kube-api-access-7lrvr\") pod \"swift-operator-controller-manager-5f8c65bbfc-n2qpd\" (UID: \"347b8067-6147-477e-b00b-a5a60a29b7d8\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.337399 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.341479 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.342275 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.342325 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-phblj" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.342417 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.348082 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.356853 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"] Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.357531 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.357785 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.367901 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lrvr\" (UniqueName: \"kubernetes.io/projected/347b8067-6147-477e-b00b-a5a60a29b7d8-kube-api-access-7lrvr\") pod \"swift-operator-controller-manager-5f8c65bbfc-n2qpd\" (UID: \"347b8067-6147-477e-b00b-a5a60a29b7d8\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.372511 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-pj67r" Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.378190 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.383581 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.391015 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5szhk\" (UniqueName: \"kubernetes.io/projected/982d5154-f537-4205-b268-3ce9aa7bdc37-kube-api-access-5szhk\") pod \"placement-operator-controller-manager-78f8948974-qt8cv\" (UID: \"982d5154-f537-4205-b268-3ce9aa7bdc37\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.411832 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438581 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7b7r\" (UniqueName: \"kubernetes.io/projected/f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5-kube-api-access-z7b7r\") pod \"telemetry-operator-controller-manager-5b6647b8f8-ztfkl\" (UID: \"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5\") " pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438718 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkrhn\" (UniqueName: \"kubernetes.io/projected/5a2dd1fe-2811-43db-959d-aceff599106d-kube-api-access-kkrhn\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438754 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438784 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q4r4\" (UniqueName: \"kubernetes.io/projected/051e5034-1155-4000-9d5b-96ee80ba6968-kube-api-access-5q4r4\") pod \"watcher-operator-controller-manager-769dc69bc-lg92p\" (UID: \"051e5034-1155-4000-9d5b-96ee80ba6968\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438817 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.438987 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c5tz\" (UniqueName: \"kubernetes.io/projected/e3af024c-b6f0-45c8-b5ab-6873b661878e-kube-api-access-9c5tz\") pod \"test-operator-controller-manager-5854674fcc-d2m42\" (UID: \"e3af024c-b6f0-45c8-b5ab-6873b661878e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.449625 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.460899 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7b7r\" (UniqueName: \"kubernetes.io/projected/f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5-kube-api-access-z7b7r\") pod \"telemetry-operator-controller-manager-5b6647b8f8-ztfkl\" (UID: \"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5\") " pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.467921 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.488056 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.533755 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541450 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541499 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c5tz\" (UniqueName: \"kubernetes.io/projected/e3af024c-b6f0-45c8-b5ab-6873b661878e-kube-api-access-9c5tz\") pod \"test-operator-controller-manager-5854674fcc-d2m42\" (UID: \"e3af024c-b6f0-45c8-b5ab-6873b661878e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541552 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541606 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr6w9\" (UniqueName: \"kubernetes.io/projected/f168e437-903a-4624-a0bc-95ea6b0e1789-kube-api-access-hr6w9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-vkjxc\" (UID: \"f168e437-903a-4624-a0bc-95ea6b0e1789\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541660 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkrhn\" (UniqueName: \"kubernetes.io/projected/5a2dd1fe-2811-43db-959d-aceff599106d-kube-api-access-kkrhn\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541686 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.541707 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q4r4\" (UniqueName: \"kubernetes.io/projected/051e5034-1155-4000-9d5b-96ee80ba6968-kube-api-access-5q4r4\") pod \"watcher-operator-controller-manager-769dc69bc-lg92p\" (UID: \"051e5034-1155-4000-9d5b-96ee80ba6968\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.541785 4768 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.541858 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:10.041836686 +0000 UTC m=+1126.961173109 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "metrics-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.542167 4768 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.542167 4768 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.542208 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:10.042194565 +0000 UTC m=+1126.961530988 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.542247 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert podName:3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:10.542221856 +0000 UTC m=+1127.461558369 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert") pod "infra-operator-controller-manager-57548d458d-vj7sm" (UID: "3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1") : secret "infra-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.579375 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q4r4\" (UniqueName: \"kubernetes.io/projected/051e5034-1155-4000-9d5b-96ee80ba6968-kube-api-access-5q4r4\") pod \"watcher-operator-controller-manager-769dc69bc-lg92p\" (UID: \"051e5034-1155-4000-9d5b-96ee80ba6968\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.580193 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c5tz\" (UniqueName: \"kubernetes.io/projected/e3af024c-b6f0-45c8-b5ab-6873b661878e-kube-api-access-9c5tz\") pod \"test-operator-controller-manager-5854674fcc-d2m42\" (UID: \"e3af024c-b6f0-45c8-b5ab-6873b661878e\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.584024 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkrhn\" (UniqueName: \"kubernetes.io/projected/5a2dd1fe-2811-43db-959d-aceff599106d-kube-api-access-kkrhn\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.636064 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.643071 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr6w9\" (UniqueName: \"kubernetes.io/projected/f168e437-903a-4624-a0bc-95ea6b0e1789-kube-api-access-hr6w9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-vkjxc\" (UID: \"f168e437-903a-4624-a0bc-95ea6b0e1789\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.660067 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.666316 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr6w9\" (UniqueName: \"kubernetes.io/projected/f168e437-903a-4624-a0bc-95ea6b0e1789-kube-api-access-hr6w9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-vkjxc\" (UID: \"f168e437-903a-4624-a0bc-95ea6b0e1789\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.686596 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv"]
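
The durationBeforeRetry values above show the volume manager's exponential backoff at work: the failed MountVolume.SetUp operations for the missing cert secrets are first retried after 500ms, and each consecutive failure doubles the wait (1s here, with 2s, 4s, and 8s appearing later in this log) until the secrets exist. A minimal stdlib-only Go sketch of that schedule follows; the initial delay matches the log, while the cap is an assumed illustration value, not read from this node's configuration.

package main

import (
	"fmt"
	"time"
)

func main() {
	const (
		initialDelay = 500 * time.Millisecond // first durationBeforeRetry seen in the log
		maxDelay     = 2 * time.Minute        // assumed cap, for illustration only
	)
	delay := initialDelay
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: durationBeforeRetry %s\n", attempt, delay)
		if delay *= 2; delay > maxDelay {
			delay = maxDelay
		}
	}
}

Run as-is this prints 500ms, 1s, 2s, 4s, 8s, 16s; the 500ms-through-8s prefix is exactly the progression recorded for the cert volumes in this section.
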
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.694128 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.721703 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857"]
Dec 03 16:37:09 crc kubenswrapper[4768]: W1203 16:37:09.737765 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad48f666_a22a_4d97_9736_5f284268bd4a.slice/crio-95ffe37cfbe811abb118e0c679f56598e53e8c2c9e7f74c91c0f5bc44aaab5ca WatchSource:0}: Error finding container 95ffe37cfbe811abb118e0c679f56598e53e8c2c9e7f74c91c0f5bc44aaab5ca: Status 404 returned error can't find the container with id 95ffe37cfbe811abb118e0c679f56598e53e8c2c9e7f74c91c0f5bc44aaab5ca
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.779489 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn"]
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.791190 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x"]
Dec 03 16:37:09 crc kubenswrapper[4768]: W1203 16:37:09.824102 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cb78567_ca7b_4a8b_9f94_b503727cf509.slice/crio-9ac7c693b6368094c07ae85bbf0ac75182d6e28c4bee103a81bec0ab8403cfe2 WatchSource:0}: Error finding container 9ac7c693b6368094c07ae85bbf0ac75182d6e28c4bee103a81bec0ab8403cfe2: Status 404 returned error can't find the container with id 9ac7c693b6368094c07ae85bbf0ac75182d6e28c4bee103a81bec0ab8403cfe2
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.846635 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp"
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.846795 4768 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: E1203 16:37:09.846870 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert podName:bae1e6ba-54bf-411a-a2b9-b79b8ff85210 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:10.846856019 +0000 UTC m=+1127.766192442 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" (UID: "bae1e6ba-54bf-411a-a2b9-b79b8ff85210") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 03 16:37:09 crc kubenswrapper[4768]: I1203 16:37:09.998338 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.049999 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.050207 4768 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.050684 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"
Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.050774 4768 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.050812 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:11.050795579 +0000 UTC m=+1127.970132002 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "webhook-server-cert" not found
Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.050832 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:11.050824999 +0000 UTC m=+1127.970161422 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "metrics-server-cert" not found
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.083914 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" event={"ID":"ff2d8ce7-0093-406f-982e-dac8b2b62593","Type":"ContainerStarted","Data":"fc20980eb69daa6ede9d7038e6f640886c6fb4dfb1f89a2c926b607666268897"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.086635 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" event={"ID":"ac20b433-8d19-4ffc-a3d8-001ab7660cfb","Type":"ContainerStarted","Data":"e148dedd4e0d2d3b9ccf1e6aec1d0399d37485d20227f0d699948ec1baf07a75"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.087692 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" event={"ID":"085d4818-0975-441d-87fc-8c22aa78d86f","Type":"ContainerStarted","Data":"b38781e58dee8a98bbe283e753177245731ca2c70490688b8742df92e8108374"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.093559 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" event={"ID":"ad48f666-a22a-4d97-9736-5f284268bd4a","Type":"ContainerStarted","Data":"95ffe37cfbe811abb118e0c679f56598e53e8c2c9e7f74c91c0f5bc44aaab5ca"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.096153 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" event={"ID":"f91ea1ca-d4a3-47c9-a5a8-38a78224668a","Type":"ContainerStarted","Data":"c750efff83481499d5aa0f516d50b99e4c61643c0cb12b8b8572917b314654be"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.099155 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" event={"ID":"0eb6c4a6-a68d-4d28-9b09-64a3dd981978","Type":"ContainerStarted","Data":"b72c7ceb2a21e0b5781272055c390f53aed54626ab071f2ef43a0ac4c5642b9b"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.100265 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" event={"ID":"8cb78567-ca7b-4a8b-9f94-b503727cf509","Type":"ContainerStarted","Data":"9ac7c693b6368094c07ae85bbf0ac75182d6e28c4bee103a81bec0ab8403cfe2"}
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.281214 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h"]
Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.288571 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9dcaa43_ad02_45aa_a320_dd9d2c609bf4.slice/crio-5b40b3657756f8f2d486204fbb1f79e5b2be5c0c4ca614afea9d7d3da9bcb07d WatchSource:0}: Error finding container 5b40b3657756f8f2d486204fbb1f79e5b2be5c0c4ca614afea9d7d3da9bcb07d: Status 404 returned error can't find the container with id 5b40b3657756f8f2d486204fbb1f79e5b2be5c0c4ca614afea9d7d3da9bcb07d
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.288637 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.305677 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr"]
Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.306074 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbec968a9_b8ec_48f3_9625_96ce1f1e2dda.slice/crio-c3be458c2e9d59a42379d799a1283e802a5d882a198ad2ba97c2d1746a4a9347 WatchSource:0}: Error finding container c3be458c2e9d59a42379d799a1283e802a5d882a198ad2ba97c2d1746a4a9347: Status 404 returned error can't find the container with id c3be458c2e9d59a42379d799a1283e802a5d882a198ad2ba97c2d1746a4a9347
Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.307215 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29a3455b_b1d4_496e_936b_348846b289e0.slice/crio-d4575d6b58b06dc337a4fc0d257ccc788877f00d26b4f14d6f600029be06f359 WatchSource:0}: Error finding container d4575d6b58b06dc337a4fc0d257ccc788877f00d26b4f14d6f600029be06f359: Status 404 returned error can't find the container with id d4575d6b58b06dc337a4fc0d257ccc788877f00d26b4f14d6f600029be06f359
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.318261 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.325037 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.541755 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.550164 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p"]
Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.557875 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"
Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.558049 4768 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert") pod "infra-operator-controller-manager-57548d458d-vj7sm" (UID: "3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.565017 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod982d5154_f537_4205_b268_3ce9aa7bdc37.slice/crio-c2a6c4c5e7f1b1977ac9873a6b5ea67a2010ff95b47d0940e619ac6e245b437e WatchSource:0}: Error finding container c2a6c4c5e7f1b1977ac9873a6b5ea67a2010ff95b47d0940e619ac6e245b437e: Status 404 returned error can't find the container with id c2a6c4c5e7f1b1977ac9873a6b5ea67a2010ff95b47d0940e619ac6e245b437e Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.577607 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv"] Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.582202 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1ad0bfb_c581_4a0c_92ab_2c702d87f3e5.slice/crio-8262479221d0bf9c195ae68e3bcb41279f3e19f031aae88218ecc07c0e269a00 WatchSource:0}: Error finding container 8262479221d0bf9c195ae68e3bcb41279f3e19f031aae88218ecc07c0e269a00: Status 404 returned error can't find the container with id 8262479221d0bf9c195ae68e3bcb41279f3e19f031aae88218ecc07c0e269a00 Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.584213 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z7b7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5b6647b8f8-ztfkl_openstack-operators(f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.585432 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3af024c_b6f0_45c8_b5ab_6873b661878e.slice/crio-c5ce0a5769a6c89c3a6d51b81d44fa0c53302de389c395a177e48e77a8261b74 WatchSource:0}: Error finding container c5ce0a5769a6c89c3a6d51b81d44fa0c53302de389c395a177e48e77a8261b74: Status 404 returned error can't find the container with id c5ce0a5769a6c89c3a6d51b81d44fa0c53302de389c395a177e48e77a8261b74 Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.586450 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z7b7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5b6647b8f8-ztfkl_openstack-operators(f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.586782 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd"] Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.589648 4768 pod_workers.go:1301] "Error syncing pod, skipping" 
err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.590623 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9c5tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d2m42_openstack-operators(e3af024c-b6f0-45c8-b5ab-6873b661878e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: W1203 16:37:10.592058 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod347b8067_6147_477e_b00b_a5a60a29b7d8.slice/crio-c9d0b6938ac60be6f7cf4323f34d95edfa04e8de2c4af9f0eaebdbdf32f8e3ce WatchSource:0}: Error finding container c9d0b6938ac60be6f7cf4323f34d95edfa04e8de2c4af9f0eaebdbdf32f8e3ce: Status 404 returned error can't find the container with id c9d0b6938ac60be6f7cf4323f34d95edfa04e8de2c4af9f0eaebdbdf32f8e3ce Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.607985 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc"] Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.613949 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl"] Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.624681 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d2m42"] Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.627841 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9c5tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d2m42_openstack-operators(e3af024c-b6f0-45c8-b5ab-6873b661878e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.627941 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hr6w9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-vkjxc_openstack-operators(f168e437-903a-4624-a0bc-95ea6b0e1789): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.628093 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7lrvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-n2qpd_openstack-operators(347b8067-6147-477e-b00b-a5a60a29b7d8): 
ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.628109 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cqnqq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_openstack-operators(7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.629285 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.629330 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" podUID="f168e437-903a-4624-a0bc-95ea6b0e1789" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.630037 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7lrvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-n2qpd_openstack-operators(347b8067-6147-477e-b00b-a5a60a29b7d8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.630957 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cqnqq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_openstack-operators(7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.631089 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" 
for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.632257 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa" Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.638064 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx"] Dec 03 16:37:10 crc kubenswrapper[4768]: I1203 16:37:10.865844 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.866190 4768 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:10 crc kubenswrapper[4768]: E1203 16:37:10.866320 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert podName:bae1e6ba-54bf-411a-a2b9-b79b8ff85210 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:12.866291422 +0000 UTC m=+1129.785627845 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" (UID: "bae1e6ba-54bf-411a-a2b9-b79b8ff85210") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.070733 4768 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.070826 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:13.070810877 +0000 UTC m=+1129.990147290 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "webhook-server-cert" not found Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.071244 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.071334 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.071438 4768 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.071586 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:13.071574746 +0000 UTC m=+1129.990911169 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "metrics-server-cert" not found Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.122131 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" event={"ID":"34791f4b-32bc-44e5-90ca-ec286f96fe15","Type":"ContainerStarted","Data":"71064ecedaaee5e776a76e0bf92d66c7cb4acd3b22f45ba06a077e619c844de0"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.124363 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" event={"ID":"f6440acf-55b8-48fb-b212-550dcc9e9600","Type":"ContainerStarted","Data":"560dce68c060beefa9d781b88c249627fb85ee1a22f5b4892ec678eb4702654e"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.132454 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" event={"ID":"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa","Type":"ContainerStarted","Data":"6a9a44c6ec2a1e5d282a8e9e255679ba05f5f7fd9d8b8f1231b285c5db9b0151"} Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.135807 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa" Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.136048 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" event={"ID":"e3af024c-b6f0-45c8-b5ab-6873b661878e","Type":"ContainerStarted","Data":"c5ce0a5769a6c89c3a6d51b81d44fa0c53302de389c395a177e48e77a8261b74"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.137298 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" event={"ID":"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5","Type":"ContainerStarted","Data":"8262479221d0bf9c195ae68e3bcb41279f3e19f031aae88218ecc07c0e269a00"} Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.138549 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.140676 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e" Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.146024 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" event={"ID":"982d5154-f537-4205-b268-3ce9aa7bdc37","Type":"ContainerStarted","Data":"c2a6c4c5e7f1b1977ac9873a6b5ea67a2010ff95b47d0940e619ac6e245b437e"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.219128 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" event={"ID":"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4","Type":"ContainerStarted","Data":"21327b1232aea912a4e948b9312374344cd99e93e1face426161c15d5748a3f4"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.221986 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" event={"ID":"051e5034-1155-4000-9d5b-96ee80ba6968","Type":"ContainerStarted","Data":"0982a8462bfe5a53cfc6a36d7d7d88a1d2a68149af52bead4f79e6f5cbeea339"} Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.282860 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" event={"ID":"f168e437-903a-4624-a0bc-95ea6b0e1789","Type":"ContainerStarted","Data":"738d5134a0caa011d1017ee8e8dc486ac608530fea481df40dfb9f83f363b2e5"} Dec 
Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.284188 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" podUID="f168e437-903a-4624-a0bc-95ea6b0e1789"
Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.292321 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" event={"ID":"347b8067-6147-477e-b00b-a5a60a29b7d8","Type":"ContainerStarted","Data":"c9d0b6938ac60be6f7cf4323f34d95edfa04e8de2c4af9f0eaebdbdf32f8e3ce"}
Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.302813 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" event={"ID":"bec968a9-b8ec-48f3-9625-96ce1f1e2dda","Type":"ContainerStarted","Data":"c3be458c2e9d59a42379d799a1283e802a5d882a198ad2ba97c2d1746a4a9347"}
Dec 03 16:37:11 crc kubenswrapper[4768]: E1203 16:37:11.303062 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8"
Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.324829 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" event={"ID":"29a3455b-b1d4-496e-936b-348846b289e0","Type":"ContainerStarted","Data":"d4575d6b58b06dc337a4fc0d257ccc788877f00d26b4f14d6f600029be06f359"}
Dec 03 16:37:11 crc kubenswrapper[4768]: I1203 16:37:11.333927 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" event={"ID":"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4","Type":"ContainerStarted","Data":"5b40b3657756f8f2d486204fbb1f79e5b2be5c0c4ca614afea9d7d3da9bcb07d"}
Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.362637 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8"
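
After the initial ErrImagePull failures, the errors above change shape to ImagePullBackOff ("Back-off pulling image ..."): the kubelet remembers the failed pull per image and refuses to retry it until a backoff window expires, doubling the window on each consecutive failure. The commonly documented upstream defaults are a 10s initial delay capped at 5m; those values are assumed here for illustration, not read from this node. A minimal Go sketch of that transition:

package main

import (
	"fmt"
	"time"
)

// imageBackoff tracks consecutive pull failures for a single image reference.
type imageBackoff struct {
	failures  int
	nextRetry time.Time
}

// recordFailure schedules the next allowed pull attempt, doubling the window
// each time: 10s, 20s, 40s, ... capped at 5m (assumed defaults).
func (b *imageBackoff) recordFailure(now time.Time) {
	delay := 10 * time.Second << b.failures
	if cap := 5 * time.Minute; delay > cap {
		delay = cap
	}
	b.failures++
	b.nextRetry = now.Add(delay)
}

// shouldPull reports whether a sync attempt may pull again; while it returns
// false, the pod's sync loop reports "Back-off pulling image ..." instead.
func (b *imageBackoff) shouldPull(now time.Time) bool {
	return !now.Before(b.nextRetry)
}

func main() {
	var b imageBackoff
	now := time.Now()
	for i := 1; i <= 4; i++ {
		b.recordFailure(now)
		fmt.Printf("failure %d: back off for %s\n", i, b.nextRetry.Sub(now))
	}
	fmt.Println("may pull now?", b.shouldPull(now))
}

This is why the same pods keep reappearing below with "Error syncing pod, skipping": each sync attempt that lands inside the window is rejected without a new pull being tried.
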
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.363016 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" podUID="f168e437-903a-4624-a0bc-95ea6b0e1789" Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.363651 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e" Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.363838 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa" Dec 03 16:37:12 crc kubenswrapper[4768]: I1203 16:37:12.618413 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.618560 4768 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.618622 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert podName:3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:16.618594812 +0000 UTC m=+1133.537931235 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert") pod "infra-operator-controller-manager-57548d458d-vj7sm" (UID: "3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:37:12 crc kubenswrapper[4768]: I1203 16:37:12.923908 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.924653 4768 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:12 crc kubenswrapper[4768]: E1203 16:37:12.924714 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert podName:bae1e6ba-54bf-411a-a2b9-b79b8ff85210 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:16.924697742 +0000 UTC m=+1133.844034165 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" (UID: "bae1e6ba-54bf-411a-a2b9-b79b8ff85210") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:13 crc kubenswrapper[4768]: I1203 16:37:13.127697 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:13 crc kubenswrapper[4768]: I1203 16:37:13.127746 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:13 crc kubenswrapper[4768]: E1203 16:37:13.127866 4768 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:37:13 crc kubenswrapper[4768]: E1203 16:37:13.127911 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:17.127897624 +0000 UTC m=+1134.047234037 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "metrics-server-cert" not found Dec 03 16:37:13 crc kubenswrapper[4768]: E1203 16:37:13.128431 4768 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:37:13 crc kubenswrapper[4768]: E1203 16:37:13.128517 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:17.128494989 +0000 UTC m=+1134.047831482 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "webhook-server-cert" not found Dec 03 16:37:16 crc kubenswrapper[4768]: I1203 16:37:16.686290 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:16 crc kubenswrapper[4768]: E1203 16:37:16.686477 4768 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 03 16:37:16 crc kubenswrapper[4768]: E1203 16:37:16.687065 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert podName:3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:24.687044312 +0000 UTC m=+1141.606380735 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert") pod "infra-operator-controller-manager-57548d458d-vj7sm" (UID: "3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1") : secret "infra-operator-webhook-server-cert" not found Dec 03 16:37:16 crc kubenswrapper[4768]: I1203 16:37:16.991556 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:16 crc kubenswrapper[4768]: E1203 16:37:16.991760 4768 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:16 crc kubenswrapper[4768]: E1203 16:37:16.991806 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert podName:bae1e6ba-54bf-411a-a2b9-b79b8ff85210 nodeName:}" failed. No retries permitted until 2025-12-03 16:37:24.991792078 +0000 UTC m=+1141.911128501 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" (UID: "bae1e6ba-54bf-411a-a2b9-b79b8ff85210") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 03 16:37:17 crc kubenswrapper[4768]: I1203 16:37:17.193350 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:17 crc kubenswrapper[4768]: I1203 16:37:17.193479 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:17 crc kubenswrapper[4768]: E1203 16:37:17.193613 4768 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 03 16:37:17 crc kubenswrapper[4768]: E1203 16:37:17.193610 4768 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 03 16:37:17 crc kubenswrapper[4768]: E1203 16:37:17.193660 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:25.193646395 +0000 UTC m=+1142.112982818 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "webhook-server-cert" not found Dec 03 16:37:17 crc kubenswrapper[4768]: E1203 16:37:17.193689 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs podName:5a2dd1fe-2811-43db-959d-aceff599106d nodeName:}" failed. No retries permitted until 2025-12-03 16:37:25.193671906 +0000 UTC m=+1142.113008329 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs") pod "openstack-operator-controller-manager-df58498df-fdv5l" (UID: "5a2dd1fe-2811-43db-959d-aceff599106d") : secret "metrics-server-cert" not found Dec 03 16:37:23 crc kubenswrapper[4768]: E1203 16:37:23.833830 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 03 16:37:23 crc kubenswrapper[4768]: E1203 16:37:23.834663 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-htnz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-zv6pv_openstack-operators(a9dcaa43-ad02-45aa-a320-dd9d2c609bf4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:24 crc kubenswrapper[4768]: I1203 16:37:24.702036 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " 
pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:24 crc kubenswrapper[4768]: I1203 16:37:24.708567 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1-cert\") pod \"infra-operator-controller-manager-57548d458d-vj7sm\" (UID: \"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:24 crc kubenswrapper[4768]: I1203 16:37:24.975774 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-7hkx8" Dec 03 16:37:24 crc kubenswrapper[4768]: I1203 16:37:24.985118 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.006326 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.010178 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/bae1e6ba-54bf-411a-a2b9-b79b8ff85210-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp\" (UID: \"bae1e6ba-54bf-411a-a2b9-b79b8ff85210\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.209727 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.209824 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.216067 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-metrics-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.225512 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/5a2dd1fe-2811-43db-959d-aceff599106d-webhook-certs\") pod \"openstack-operator-controller-manager-df58498df-fdv5l\" (UID: \"5a2dd1fe-2811-43db-959d-aceff599106d\") " pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" 
Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.268720 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-phblj" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.277411 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.312576 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tlkvt" Dec 03 16:37:25 crc kubenswrapper[4768]: I1203 16:37:25.320701 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:25 crc kubenswrapper[4768]: E1203 16:37:25.710622 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 03 16:37:25 crc kubenswrapper[4768]: E1203 16:37:25.711002 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dzsfv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} 
start failed in pod keystone-operator-controller-manager-7765d96ddf-7pfgm_openstack-operators(dc9eedd8-2956-447b-9a21-7b71bcb0c8c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.028799 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.029297 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.029340 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.030026 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.030086 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9" gracePeriod=600 Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.110603 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l"] Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.232741 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp"] Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.243788 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm"] Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.468411 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" event={"ID":"085d4818-0975-441d-87fc-8c22aa78d86f","Type":"ContainerStarted","Data":"18045d3a465b3e1deffe47abb5dcd1a7b6d91dc6bde649e306eaf4d82b11cf90"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.472939 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" event={"ID":"5a2dd1fe-2811-43db-959d-aceff599106d","Type":"ContainerStarted","Data":"7281741f1797a3657dcab162e6c4f4dae7629a9bf70de67c0a4fe16a1c34e2a1"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.493556 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" 
event={"ID":"f6440acf-55b8-48fb-b212-550dcc9e9600","Type":"ContainerStarted","Data":"987ef12c5b6b53b95bbf68eb32842491426528fb2e16feb61fcaa3d16309404b"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.517880 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.517929 4768 scope.go:117] "RemoveContainer" containerID="3989add581768ea157c22213a30e680cfe61b22aa425273f58411706a4cfe346" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.517885 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9" exitCode=0 Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.520915 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" event={"ID":"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1","Type":"ContainerStarted","Data":"f6a8ffec2e1d98617675a147e3be859fc94ac6a48e3de784030c87e4d76bae5a"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.527049 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" event={"ID":"ff2d8ce7-0093-406f-982e-dac8b2b62593","Type":"ContainerStarted","Data":"707e5ad647bf561110b6638a344ea5871ea4e31726dd645be61656f5dda3df11"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.528848 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" event={"ID":"34791f4b-32bc-44e5-90ca-ec286f96fe15","Type":"ContainerStarted","Data":"fdfd957a0f172d70867191fb11ef80cecfb7676bd5d6d66aa15e0c7bd862255b"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.532744 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" event={"ID":"ac20b433-8d19-4ffc-a3d8-001ab7660cfb","Type":"ContainerStarted","Data":"4150c85a2a9ef6a67ba9ddf00acbe081fd53d37d78f0587f13603b169f168e67"} Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.533862 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hr6w9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-vkjxc_openstack-operators(f168e437-903a-4624-a0bc-95ea6b0e1789): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.535418 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" podUID="f168e437-903a-4624-a0bc-95ea6b0e1789" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.540890 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" event={"ID":"bae1e6ba-54bf-411a-a2b9-b79b8ff85210","Type":"ContainerStarted","Data":"4f84b2fde5159fe73a52ac07f5ed5e1c1aa0937a176c457389d7cfc233dd4878"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.553456 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" event={"ID":"f91ea1ca-d4a3-47c9-a5a8-38a78224668a","Type":"ContainerStarted","Data":"44436811c1a9181d3b3fa3528fecf3ba40b2c1888464f9d0c7f98f0d681fab35"} Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.555181 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5q4r4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-lg92p_openstack-operators(051e5034-1155-4000-9d5b-96ee80ba6968): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.556415 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" podUID="051e5034-1155-4000-9d5b-96ee80ba6968" Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.558557 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lpf76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-jgz9x_openstack-operators(0eb6c4a6-a68d-4d28-9b09-64a3dd981978): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:37:26 crc kubenswrapper[4768]: E1203 16:37:26.559626 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" podUID="0eb6c4a6-a68d-4d28-9b09-64a3dd981978" Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.560083 4768 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" event={"ID":"ad48f666-a22a-4d97-9736-5f284268bd4a","Type":"ContainerStarted","Data":"1d86bf6c311f3a5e7f71f1aef02a10425d1320d4f7af330b9755afd34332cbf0"} Dec 03 16:37:26 crc kubenswrapper[4768]: I1203 16:37:26.571530 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" event={"ID":"29a3455b-b1d4-496e-936b-348846b289e0","Type":"ContainerStarted","Data":"3c9c1e190a6f384452d7632213cab16601d87ffbc676c6417c395ab3f21de19a"} Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.593681 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" event={"ID":"982d5154-f537-4205-b268-3ce9aa7bdc37","Type":"ContainerStarted","Data":"ea2d16c16263126fa0fa2676ef7b366f858301b4dcb774af329b24efdaf61063"} Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.601960 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" event={"ID":"0eb6c4a6-a68d-4d28-9b09-64a3dd981978","Type":"ContainerStarted","Data":"e19481ca8cd48400c6b7552fee5e26fe6ef10b5950ab4fb3a60bae054ad4b556"} Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.602588 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" Dec 03 16:37:27 crc kubenswrapper[4768]: E1203 16:37:27.610579 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" podUID="0eb6c4a6-a68d-4d28-9b09-64a3dd981978" Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.611445 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" event={"ID":"8cb78567-ca7b-4a8b-9f94-b503727cf509","Type":"ContainerStarted","Data":"26bdec3d898e9a60a0bc6d2833a95322acd5089389dee976be3e02dc6815c74c"} Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.618335 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" event={"ID":"051e5034-1155-4000-9d5b-96ee80ba6968","Type":"ContainerStarted","Data":"c752091342ab1734b8452e6aa8d1b5eaf27eb597b7a59f5b62cacd4287586372"} Dec 03 16:37:27 crc kubenswrapper[4768]: I1203 16:37:27.619101 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" Dec 03 16:37:27 crc kubenswrapper[4768]: E1203 16:37:27.620322 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" podUID="051e5034-1155-4000-9d5b-96ee80ba6968" Dec 03 16:37:28 crc kubenswrapper[4768]: I1203 16:37:28.627718 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" 
event={"ID":"5a2dd1fe-2811-43db-959d-aceff599106d","Type":"ContainerStarted","Data":"b98c78bde519f07ff31ef0b0e6cbe1023c68bfc740a6cc666ff4dda98375f2e8"} Dec 03 16:37:28 crc kubenswrapper[4768]: I1203 16:37:28.628531 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:28 crc kubenswrapper[4768]: I1203 16:37:28.634383 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15"} Dec 03 16:37:28 crc kubenswrapper[4768]: I1203 16:37:28.637027 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" event={"ID":"bec968a9-b8ec-48f3-9625-96ce1f1e2dda","Type":"ContainerStarted","Data":"b2dd98f16bf854aa8633a86e20ef808ff57b5779b6f98153542119499e794c32"} Dec 03 16:37:28 crc kubenswrapper[4768]: E1203 16:37:28.638506 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" podUID="0eb6c4a6-a68d-4d28-9b09-64a3dd981978" Dec 03 16:37:28 crc kubenswrapper[4768]: E1203 16:37:28.638795 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" podUID="051e5034-1155-4000-9d5b-96ee80ba6968" Dec 03 16:37:28 crc kubenswrapper[4768]: I1203 16:37:28.667209 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" podStartSLOduration=19.667190595 podStartE2EDuration="19.667190595s" podCreationTimestamp="2025-12-03 16:37:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:37:28.664309572 +0000 UTC m=+1145.583645995" watchObservedRunningTime="2025-12-03 16:37:28.667190595 +0000 UTC m=+1145.586527018" Dec 03 16:37:35 crc kubenswrapper[4768]: I1203 16:37:35.287963 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-df58498df-fdv5l" Dec 03 16:37:39 crc kubenswrapper[4768]: I1203 16:37:39.182041 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" Dec 03 16:37:39 crc kubenswrapper[4768]: E1203 16:37:39.521676 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21" Dec 03 16:37:39 crc kubenswrapper[4768]: E1203 16:37:39.521729 4768 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21" Dec 03 16:37:39 crc 
kubenswrapper[4768]: E1203 16:37:39.521889 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-z7b7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5b6647b8f8-ztfkl_openstack-operators(f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:39 crc kubenswrapper[4768]: I1203 16:37:39.545073 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.009092 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.009796 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-baremetal-operator-agent:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_ANSIBLEEE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_EVALUATOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-evaluator:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_AODH_NOTIFIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-aodh-notifier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_APACHE_IMAGE_URL_DEFAULT,Value:registry.redhat.io/ubi9/httpd-24:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_KEYSTONE_LISTENER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-keystone-listener:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BARBICAN_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-barbican-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_IPMI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-ipmi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_MYSQLD_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/mysqld-exporter:v0.15.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_NOTIFICATION_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ceilometer-notification:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CEILOMETER_SGCORE_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_BACKUP_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-backup:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CINDER_VOLUME_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cinder-volume:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_API_IMAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CLOUDKITTY_PROC_I
MAGE_URL_DEFAULT,Value:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-processor:current,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_BACKENDBIND9_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-backend-bind9:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_CENTRAL_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-central:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_MDNS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-mdns:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_PRODUCER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-producer:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_UNBOUND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-unbound:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_DESIGNATE_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-designate-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_FRR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-frr:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_ISCSID_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-iscsid:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_KEPLER_IMAGE_URL_DEFAULT,Value:quay.io/sustainable_computing_io/kepler:release-0.7.12,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_LOGROTATE_CROND_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-cron:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_MULTIPATHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-multipathd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_DHCP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-dhcp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_METADATA_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-metadata-agent-ovn:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_OVN_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-ovn-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NEUTRON_SRIOV_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-sriov-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_NODE_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/prometheus/node-exporter:v1.5.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_OVN_BGP_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-bgp-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_EDPM_PODMAN_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/navidys/prometheus-podman-exporter:v1.10.1,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_GLANCE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HEAT_CFNAPI_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-api-cfn:current-podified,ValueFrom:nil,},EnvVar{Name:RELAT
ED_IMAGE_HEAT_ENGINE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-heat-engine:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_HORIZON_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_MEMCACHED_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_INFRA_REDIS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-redis:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_INSPECTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-inspector:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_NEUTRON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-neutron-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PXE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ironic-pxe:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_IRONIC_PYTHON_AGENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/ironic-python-agent:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KEYSTONE_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_KSM_IMAGE_URL_DEFAULT,Value:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MANILA_SHARE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-manila-share:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_MARIADB_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NET_UTILS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-netutils:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NEUTRON_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_COMPUTE_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-compute:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_CONDUCTOR_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-conductor:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_NOVNC_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-novncproxy:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_NOVA_SCHEDULER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-nova-scheduler:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-an
telope-centos9/openstack-octavia-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HEALTHMANAGER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-health-manager:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_HOUSEKEEPING_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-housekeeping:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_RSYSLOG_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rsyslog:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OCTAVIA_WORKER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-octavia-worker:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_CLIENT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-openstackclient:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_MUST_GATHER_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-must-gather:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OPENSTACK_NETWORK_EXPORTER_IMAGE_URL_DEFAULT,Value:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OS_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/edpm-hardened-uefi:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_CONTROLLER_OVS_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_NORTHD_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-northd:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OVN_SB_DBCLUSTER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PLACEMENT_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_RABBITMQ_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_ACCOUNT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-account:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_CONTAINER_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-container:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_OBJECT_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-object:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_SWIFT_PROXY_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_TEST_TEMPEST_IMAGE_URL_DEFAULT,Value:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_API_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-api:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_APPLIER_IMAGE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-applier:current-podified,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_WATCHER_DECISION_ENGINE_IMA
GE_URL_DEFAULT,Value:quay.io/podified-master-centos9/openstack-watcher-decision-engine:current-podified,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bv7gm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp_openstack-operators(bae1e6ba-54bf-411a-a2b9-b79b8ff85210): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.339158 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.339330 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dzsfv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-7pfgm_openstack-operators(dc9eedd8-2956-447b-9a21-7b71bcb0c8c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.340521 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" podUID="dc9eedd8-2956-447b-9a21-7b71bcb0c8c4" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.341510 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.341672 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gjz24,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-9n7sr_openstack-operators(34791f4b-32bc-44e5-90ca-ec286f96fe15): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" logger="UnhandledError" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.342913 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\\\": context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" podUID="34791f4b-32bc-44e5-90ca-ec286f96fe15" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.359469 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.359629 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-htnz5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-zv6pv_openstack-operators(a9dcaa43-ad02-45aa-a320-dd9d2c609bf4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.360809 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" podUID="a9dcaa43-ad02-45aa-a320-dd9d2c609bf4" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.534258 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" podUID="f168e437-903a-4624-a0bc-95ea6b0e1789" Dec 03 16:37:40 crc kubenswrapper[4768]: I1203 16:37:40.772941 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.775193 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" podUID="34791f4b-32bc-44e5-90ca-ec286f96fe15" Dec 03 16:37:40 crc kubenswrapper[4768]: I1203 16:37:40.775650 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.808506 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.808797 4768 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-86mgr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-jwxs2_openstack-operators(f6440acf-55b8-48fb-b212-550dcc9e9600): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" logger="UnhandledError" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.810067 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\\\": context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" podUID="f6440acf-55b8-48fb-b212-550dcc9e9600" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.835900 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 03 16:37:40 crc kubenswrapper[4768]: E1203 16:37:40.836128 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: 
{{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9c5tz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d2m42_openstack-operators(e3af024c-b6f0-45c8-b5ab-6873b661878e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.348566 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.348993 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5szhk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-qt8cv_openstack-operators(982d5154-f537-4205-b268-3ce9aa7bdc37): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.350332 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\\\": context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" podUID="982d5154-f537-4205-b268-3ce9aa7bdc37" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.361154 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.361324 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cqnqq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_openstack-operators(7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: I1203 16:37:41.781353 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" Dec 03 16:37:41 crc kubenswrapper[4768]: I1203 16:37:41.781405 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" Dec 03 16:37:41 crc kubenswrapper[4768]: I1203 16:37:41.782894 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" Dec 03 16:37:41 crc kubenswrapper[4768]: I1203 16:37:41.783204 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.783386 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" podUID="f6440acf-55b8-48fb-b212-550dcc9e9600" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.783522 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" podUID="982d5154-f537-4205-b268-3ce9aa7bdc37" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.784362 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" podUID="34791f4b-32bc-44e5-90ca-ec286f96fe15" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.810977 4768 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.811169 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-msbqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-9q857_openstack-operators(ad48f666-a22a-4d97-9736-5f284268bd4a): ErrImagePull: rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\": context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.812371 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = reading blob sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2: Get \\\"https://quay.io/v2/openstack-k8s-operators/kube-rbac-proxy/blobs/sha256:723607448b0abc536cd883abffcf6942c1c562a48117db73f6fe693d99395ee2\\\": context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" podUID="ad48f666-a22a-4d97-9736-5f284268bd4a" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.821814 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.821972 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kklzn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-m97mz_openstack-operators(085d4818-0975-441d-87fc-8c22aa78d86f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.823068 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.823142 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mtwrl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-bfpmf_openstack-operators(29a3455b-b1d4-496e-936b-348846b289e0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.824348 4768 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" podUID="29a3455b-b1d4-496e-936b-348846b289e0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.824427 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" podUID="085d4818-0975-441d-87fc-8c22aa78d86f" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.825521 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.825662 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2l9dd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
infra-operator-controller-manager-57548d458d-vj7sm_openstack-operators(3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.845958 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.846169 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9ls5d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-g5nnn_openstack-operators(8cb78567-ca7b-4a8b-9f94-b503727cf509): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:41 crc kubenswrapper[4768]: E1203 16:37:41.848057 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" podUID="8cb78567-ca7b-4a8b-9f94-b503727cf509" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.318582 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.318795 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gfw7r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-5mfx6_openstack-operators(ac20b433-8d19-4ffc-a3d8-001ab7660cfb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.319669 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.319765 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x59zz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-78b4bc895b-hg9tx_openstack-operators(ff2d8ce7-0093-406f-982e-dac8b2b62593): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.319921 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying layer: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.320010 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g58fx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-cnc7h_openstack-operators(bec968a9-b8ec-48f3-9625-96ce1f1e2dda): ErrImagePull: rpc error: code = Canceled desc = copying layer: context canceled" logger="UnhandledError" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.320073 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" podUID="ac20b433-8d19-4ffc-a3d8-001ab7660cfb" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.321621 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying layer: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" podUID="bec968a9-b8ec-48f3-9625-96ce1f1e2dda" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.321688 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" podUID="ff2d8ce7-0093-406f-982e-dac8b2b62593" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.342337 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.342526 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7lrvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-n2qpd_openstack-operators(347b8067-6147-477e-b00b-a5a60a29b7d8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.372421 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.372576 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zk27v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-m8lmv_openstack-operators(f91ea1ca-d4a3-47c9-a5a8-38a78224668a): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.374585 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" podUID="f91ea1ca-d4a3-47c9-a5a8-38a78224668a" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.808111 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" event={"ID":"0eb6c4a6-a68d-4d28-9b09-64a3dd981978","Type":"ContainerStarted","Data":"da3c8758a3577f1118535c341b991239fa66a52d176442f7e02d9605b3c0df29"} Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.810335 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.810370 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.810384 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.810397 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.810425 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.812547 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.812864 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.813327 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" Dec 03 
16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.814258 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" Dec 03 16:37:42 crc kubenswrapper[4768]: I1203 16:37:42.814617 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.871911 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.929412 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" podUID="3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.935078 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" podUID="bae1e6ba-54bf-411a-a2b9-b79b8ff85210" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.940168 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:42 crc kubenswrapper[4768]: E1203 16:37:42.983843 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.134467 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-jgz9x" podStartSLOduration=2.575497119 podStartE2EDuration="35.134450351s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.824134895 +0000 UTC m=+1126.743471318" lastFinishedPulling="2025-12-03 16:37:42.383088127 +0000 UTC m=+1159.302424550" observedRunningTime="2025-12-03 16:37:43.131509337 +0000 UTC m=+1160.050845760" watchObservedRunningTime="2025-12-03 16:37:43.134450351 +0000 UTC m=+1160.053786774" Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.637573 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.814582 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" event={"ID":"bae1e6ba-54bf-411a-a2b9-b79b8ff85210","Type":"ContainerStarted","Data":"de9ed29b4df6e89e5fa3c6b5aca4a9daae96d518558371d6bcf682f28a84bf19"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.816151 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" podUID="bae1e6ba-54bf-411a-a2b9-b79b8ff85210" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.816730 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" event={"ID":"ad48f666-a22a-4d97-9736-5f284268bd4a","Type":"ContainerStarted","Data":"ae4dbb2c469ac0dbc68af253d554cb8ddd0f1673e23ef40eaedd7d3485f7c570"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.817617 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.819383 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" event={"ID":"8cb78567-ca7b-4a8b-9f94-b503727cf509","Type":"ContainerStarted","Data":"f5d96eb1e5fda2bef1d8781db485a81adf270a50be7017b87999ff47147e3aef"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.820742 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.821087 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" event={"ID":"ff2d8ce7-0093-406f-982e-dac8b2b62593","Type":"ContainerStarted","Data":"5510feab0a8bbb4c17114ca974e503e7782eca23f58f471f9d79761f1b9deda8"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.824207 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" event={"ID":"051e5034-1155-4000-9d5b-96ee80ba6968","Type":"ContainerStarted","Data":"5e416b3d24f8cb3719816b0f996386f25b641fcc25eb9004b4ae9cae3a46b5cc"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.825816 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" event={"ID":"ac20b433-8d19-4ffc-a3d8-001ab7660cfb","Type":"ContainerStarted","Data":"602ad9c10a0c53fe1bcd4675f50c37c609d6f68fdad6f252ddbad3c84d5a3dd6"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.826044 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.827428 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.827724 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" 
event={"ID":"085d4818-0975-441d-87fc-8c22aa78d86f","Type":"ContainerStarted","Data":"2f9aa202aa67ff8aab3228f5283dc1c3a9569911c4f4d78d00c0a9f84d28cf5d"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.828894 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" event={"ID":"347b8067-6147-477e-b00b-a5a60a29b7d8","Type":"ContainerStarted","Data":"72ec8187f0d588e1f111f275935ab65b8832dac939e1901af3e2ce09f7770226"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.829773 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.830366 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" event={"ID":"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1","Type":"ContainerStarted","Data":"f3e2df4d8adc7b6704b73f1383f2d49a30dc17dd67328b8842b6a0100e2879e5"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.831634 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" podUID="3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.831974 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" event={"ID":"bec968a9-b8ec-48f3-9625-96ce1f1e2dda","Type":"ContainerStarted","Data":"c80e07b573499c1d3116e6a0f61389499ab86b9d78d549f5a545a5558b3131c8"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.832239 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.833854 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.836324 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" event={"ID":"29a3455b-b1d4-496e-936b-348846b289e0","Type":"ContainerStarted","Data":"a2707347074d49feedba7e650f7363e9164c38b943862da58f04f8b4ca8a0886"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.842214 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" event={"ID":"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4","Type":"ContainerStarted","Data":"6ecca4c183a9f77b333015d58c30c98f0fc5c8dfcf0466c36b7003e599603869"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.842256 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" 
event={"ID":"a9dcaa43-ad02-45aa-a320-dd9d2c609bf4","Type":"ContainerStarted","Data":"3da4ebf13e0213e8569cd71af3a7e5d39a1fbb94598342cde73053a7b90ef480"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.842463 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.843847 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" event={"ID":"f6440acf-55b8-48fb-b212-550dcc9e9600","Type":"ContainerStarted","Data":"4293fb51ec6ece27a092413c63779ea0b2d7af66490dce8630ee42d280629dfb"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.845811 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" event={"ID":"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4","Type":"ContainerStarted","Data":"e0abbee3dda12319d0be15c53e5a127c20680bccbcba46511377497f21d64d91"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.845835 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" event={"ID":"dc9eedd8-2956-447b-9a21-7b71bcb0c8c4","Type":"ContainerStarted","Data":"e24974d946e56e70473727d88395718abc155f8aa6d007790b226565ec952664"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.846164 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.849184 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" event={"ID":"f91ea1ca-d4a3-47c9-a5a8-38a78224668a","Type":"ContainerStarted","Data":"b121bf775b2a3a53d27fe2aa5c2c93c5d60b65c9f91a0d7da3ff66a92dc18379"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.850288 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" event={"ID":"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa","Type":"ContainerStarted","Data":"258e1bb4af87bf2746d8fa3c4e33ba7fe7e8b903543d7bfa8175128b3dfa1fbc"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.851553 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.851684 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" event={"ID":"e3af024c-b6f0-45c8-b5ab-6873b661878e","Type":"ContainerStarted","Data":"fd3a130e6a5b6f895b7a474bfc65e36c148e4e2ca44a591905a05ae4138cfd34"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.852694 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.853714 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" event={"ID":"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5","Type":"ContainerStarted","Data":"94dcff59f1f1ed7433852fc06cef37ae7d3754357642c177e92ff614dfbb2d57"} Dec 03 16:37:43 crc kubenswrapper[4768]: E1203 16:37:43.854933 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.855462 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" event={"ID":"982d5154-f537-4205-b268-3ce9aa7bdc37","Type":"ContainerStarted","Data":"667a16c8897a93d3b472bc5eca7fbbed969612978fca08d6c349f4cf40dde502"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.857589 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" event={"ID":"34791f4b-32bc-44e5-90ca-ec286f96fe15","Type":"ContainerStarted","Data":"982928b5963d90be4e6ac533ed22dc66b75e176a3714cd388bca53af68174179"} Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.930736 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cnc7h" podStartSLOduration=20.559325168 podStartE2EDuration="35.930719859s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.314628301 +0000 UTC m=+1127.233964724" lastFinishedPulling="2025-12-03 16:37:25.686022982 +0000 UTC m=+1142.605359415" observedRunningTime="2025-12-03 16:37:43.926963184 +0000 UTC m=+1160.846299607" watchObservedRunningTime="2025-12-03 16:37:43.930719859 +0000 UTC m=+1160.850056282" Dec 03 16:37:43 crc kubenswrapper[4768]: I1203 16:37:43.932352 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-5mfx6" podStartSLOduration=20.277570863 podStartE2EDuration="35.93234455s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.028326651 +0000 UTC m=+1126.947663074" lastFinishedPulling="2025-12-03 16:37:25.683100318 +0000 UTC m=+1142.602436761" observedRunningTime="2025-12-03 16:37:43.896390462 +0000 UTC m=+1160.815726885" watchObservedRunningTime="2025-12-03 16:37:43.93234455 +0000 UTC m=+1160.851680973" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.035145 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-qt8cv" podStartSLOduration=20.893400935 podStartE2EDuration="36.035125526s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.566700187 +0000 UTC m=+1127.486036610" lastFinishedPulling="2025-12-03 16:37:25.708424768 +0000 UTC m=+1142.627761201" observedRunningTime="2025-12-03 16:37:44.01391929 +0000 UTC m=+1160.933255723" 
watchObservedRunningTime="2025-12-03 16:37:44.035125526 +0000 UTC m=+1160.954461949" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.036455 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-m97mz" podStartSLOduration=19.907620941 podStartE2EDuration="36.036447069s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.575789343 +0000 UTC m=+1126.495125766" lastFinishedPulling="2025-12-03 16:37:25.704598081 +0000 UTC m=+1142.623951894" observedRunningTime="2025-12-03 16:37:44.031447633 +0000 UTC m=+1160.950784056" watchObservedRunningTime="2025-12-03 16:37:44.036447069 +0000 UTC m=+1160.955783492" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.070784 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-m8lmv" podStartSLOduration=20.112997027 podStartE2EDuration="36.070770396s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.726228522 +0000 UTC m=+1126.645564945" lastFinishedPulling="2025-12-03 16:37:25.684001881 +0000 UTC m=+1142.603338314" observedRunningTime="2025-12-03 16:37:44.066345594 +0000 UTC m=+1160.985682007" watchObservedRunningTime="2025-12-03 16:37:44.070770396 +0000 UTC m=+1160.990106819" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.090944 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hg9tx" podStartSLOduration=19.881378278 podStartE2EDuration="36.090924185s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.477220244 +0000 UTC m=+1126.396556667" lastFinishedPulling="2025-12-03 16:37:25.686766141 +0000 UTC m=+1142.606102574" observedRunningTime="2025-12-03 16:37:44.08479226 +0000 UTC m=+1161.004128693" watchObservedRunningTime="2025-12-03 16:37:44.090924185 +0000 UTC m=+1161.010260608" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.114169 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv" podStartSLOduration=4.003764388 podStartE2EDuration="36.114152912s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.298417412 +0000 UTC m=+1127.217753835" lastFinishedPulling="2025-12-03 16:37:42.408805936 +0000 UTC m=+1159.328142359" observedRunningTime="2025-12-03 16:37:44.113948836 +0000 UTC m=+1161.033285259" watchObservedRunningTime="2025-12-03 16:37:44.114152912 +0000 UTC m=+1161.033489335" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.156062 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9q857" podStartSLOduration=20.215332071 podStartE2EDuration="36.156041099s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.744666708 +0000 UTC m=+1126.664003131" lastFinishedPulling="2025-12-03 16:37:25.685375726 +0000 UTC m=+1142.604712159" observedRunningTime="2025-12-03 16:37:44.14893491 +0000 UTC m=+1161.068271333" watchObservedRunningTime="2025-12-03 16:37:44.156041099 +0000 UTC m=+1161.075377522" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.193939 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm" podStartSLOduration=4.063532146 podStartE2EDuration="36.193920906s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.30112635 +0000 UTC m=+1127.220462763" lastFinishedPulling="2025-12-03 16:37:42.4315151 +0000 UTC m=+1159.350851523" observedRunningTime="2025-12-03 16:37:44.175314706 +0000 UTC m=+1161.094651129" watchObservedRunningTime="2025-12-03 16:37:44.193920906 +0000 UTC m=+1161.113257329" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.224783 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-g5nnn" podStartSLOduration=20.373546827 podStartE2EDuration="36.224759955s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:09.833487271 +0000 UTC m=+1126.752823694" lastFinishedPulling="2025-12-03 16:37:25.684700379 +0000 UTC m=+1142.604036822" observedRunningTime="2025-12-03 16:37:44.202513063 +0000 UTC m=+1161.121849496" watchObservedRunningTime="2025-12-03 16:37:44.224759955 +0000 UTC m=+1161.144096378" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.236512 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-bfpmf" podStartSLOduration=20.863893009999998 podStartE2EDuration="36.236495431s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.309072871 +0000 UTC m=+1127.228409294" lastFinishedPulling="2025-12-03 16:37:25.681675282 +0000 UTC m=+1142.601011715" observedRunningTime="2025-12-03 16:37:44.227263158 +0000 UTC m=+1161.146599581" watchObservedRunningTime="2025-12-03 16:37:44.236495431 +0000 UTC m=+1161.155831844" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.306270 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jwxs2" podStartSLOduration=21.163022654 podStartE2EDuration="36.306249163s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.565137167 +0000 UTC m=+1127.484473590" lastFinishedPulling="2025-12-03 16:37:25.708363666 +0000 UTC m=+1142.627700099" observedRunningTime="2025-12-03 16:37:44.266263173 +0000 UTC m=+1161.185599596" watchObservedRunningTime="2025-12-03 16:37:44.306249163 +0000 UTC m=+1161.225585586" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.306851 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-lg92p" podStartSLOduration=3.368405546 podStartE2EDuration="35.306846678s" podCreationTimestamp="2025-12-03 16:37:09 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.547863121 +0000 UTC m=+1127.467199544" lastFinishedPulling="2025-12-03 16:37:42.486304253 +0000 UTC m=+1159.405640676" observedRunningTime="2025-12-03 16:37:44.287103749 +0000 UTC m=+1161.206440182" watchObservedRunningTime="2025-12-03 16:37:44.306846678 +0000 UTC m=+1161.226183101" Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.312870 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" podStartSLOduration=20.927805464 podStartE2EDuration="36.31286102s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.301673414 
Dec 03 16:37:44 crc kubenswrapper[4768]: I1203 16:37:44.312870 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-9n7sr" podStartSLOduration=20.927805464 podStartE2EDuration="36.31286102s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.301673414 +0000 UTC m=+1127.221009837" lastFinishedPulling="2025-12-03 16:37:25.68672896 +0000 UTC m=+1142.606065393" observedRunningTime="2025-12-03 16:37:44.307896264 +0000 UTC m=+1161.227232687" watchObservedRunningTime="2025-12-03 16:37:44.31286102 +0000 UTC m=+1161.232197443"
Dec 03 16:37:44 crc kubenswrapper[4768]: E1203 16:37:44.869175 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-baremetal-operator@sha256:14cfad6ea2e7f7ecc4cb2aafceb9c61514b3d04b66668832d1e4ac3b19f1ab81\\\"\"" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" podUID="bae1e6ba-54bf-411a-a2b9-b79b8ff85210"
Dec 03 16:37:44 crc kubenswrapper[4768]: E1203 16:37:44.869329 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:09a6d0613ee2d3c1c809fc36c22678458ac271e0da87c970aec0a5339f5423f7\\\"\"" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" podUID="3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1"
Dec 03 16:37:49 crc kubenswrapper[4768]: I1203 16:37:49.273073 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-zv6pv"
Dec 03 16:37:49 crc kubenswrapper[4768]: I1203 16:37:49.293433 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-7pfgm"
Dec 03 16:37:54 crc kubenswrapper[4768]: E1203 16:37:54.533703 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podUID="7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa"
Dec 03 16:37:55 crc kubenswrapper[4768]: E1203 16:37:55.533249 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podUID="e3af024c-b6f0-45c8-b5ab-6873b661878e"
Dec 03 16:37:55 crc kubenswrapper[4768]: I1203 16:37:55.980759 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-vkjxc" event={"ID":"f168e437-903a-4624-a0bc-95ea6b0e1789","Type":"ContainerStarted","Data":"70cd205e33dbcc9d471f92c879b8aa0bebfb3ce62772fe72a8dcb7ccff6c8c0e"}
watchObservedRunningTime="2025-12-03 16:37:56.00509606 +0000 UTC m=+1172.924432503" Dec 03 16:37:56 crc kubenswrapper[4768]: E1203 16:37:56.534168 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\"" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podUID="347b8067-6147-477e-b00b-a5a60a29b7d8" Dec 03 16:37:57 crc kubenswrapper[4768]: E1203 16:37:57.534225 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.18:5001/openstack-k8s-operators/telemetry-operator:8eb50ce62a3905aa1a1da4c6aeb639c250edca21\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podUID="f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5" Dec 03 16:37:57 crc kubenswrapper[4768]: I1203 16:37:57.999110 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" event={"ID":"bae1e6ba-54bf-411a-a2b9-b79b8ff85210","Type":"ContainerStarted","Data":"3ec6bc2c87b327e9baeeafdd916707db5a067fd5c51cc0132ee5bdcf6d66c5cc"} Dec 03 16:37:57 crc kubenswrapper[4768]: I1203 16:37:57.999710 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:37:58 crc kubenswrapper[4768]: I1203 16:37:58.001832 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" event={"ID":"3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1","Type":"ContainerStarted","Data":"33377c8e8b9de154270c70aa9b090551f4043f4358c1462b7452d4886febbf12"} Dec 03 16:37:58 crc kubenswrapper[4768]: I1203 16:37:58.002913 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:37:58 crc kubenswrapper[4768]: I1203 16:37:58.060550 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" podStartSLOduration=19.469217833 podStartE2EDuration="50.060516075s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:26.379928045 +0000 UTC m=+1143.299264458" lastFinishedPulling="2025-12-03 16:37:56.971226267 +0000 UTC m=+1173.890562700" observedRunningTime="2025-12-03 16:37:58.041235988 +0000 UTC m=+1174.960572451" watchObservedRunningTime="2025-12-03 16:37:58.060516075 +0000 UTC m=+1174.979852538" Dec 03 16:37:58 crc kubenswrapper[4768]: I1203 16:37:58.073457 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" podStartSLOduration=19.412247314 podStartE2EDuration="50.073430011s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:26.380293674 +0000 UTC m=+1143.299630107" lastFinishedPulling="2025-12-03 16:37:57.041476371 +0000 UTC m=+1173.960812804" observedRunningTime="2025-12-03 16:37:58.072430866 +0000 UTC m=+1174.991767399" watchObservedRunningTime="2025-12-03 16:37:58.073430011 +0000 UTC m=+1174.992766474" Dec 03 16:38:05 crc kubenswrapper[4768]: I1203 16:38:04.994767 4768 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-vj7sm" Dec 03 16:38:05 crc kubenswrapper[4768]: I1203 16:38:05.331813 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp" Dec 03 16:38:11 crc kubenswrapper[4768]: I1203 16:38:11.137710 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" event={"ID":"347b8067-6147-477e-b00b-a5a60a29b7d8","Type":"ContainerStarted","Data":"74864cfb68410a58b2dd846e77c7c74cf761762f138d3791a114b51273fb9fb2"} Dec 03 16:38:11 crc kubenswrapper[4768]: I1203 16:38:11.138819 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:38:11 crc kubenswrapper[4768]: I1203 16:38:11.161026 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" podStartSLOduration=3.585460703 podStartE2EDuration="1m3.161009389s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.628017825 +0000 UTC m=+1127.547354248" lastFinishedPulling="2025-12-03 16:38:10.203566471 +0000 UTC m=+1187.122902934" observedRunningTime="2025-12-03 16:38:11.155568462 +0000 UTC m=+1188.074904885" watchObservedRunningTime="2025-12-03 16:38:11.161009389 +0000 UTC m=+1188.080345812" Dec 03 16:38:12 crc kubenswrapper[4768]: I1203 16:38:12.148129 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" event={"ID":"7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa","Type":"ContainerStarted","Data":"d6abfaa0ba6bcc76ed96f34ad4fc6dc5d8a0cfaf869507750776735a8d48ea70"} Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.164187 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" event={"ID":"e3af024c-b6f0-45c8-b5ab-6873b661878e","Type":"ContainerStarted","Data":"6457d2385b7ce842c3675abcaf95600c5656b30a35b95b2abb1c8f69a35facc1"} Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.164800 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.166799 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" event={"ID":"f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5","Type":"ContainerStarted","Data":"18d828bc96b17b122395c4677a88717d310d7db7f4faa6241ed00f0cdd645cd3"} Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.166847 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.167007 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.201011 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" podStartSLOduration=5.165836501 podStartE2EDuration="1m5.200977333s" 
podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.584059485 +0000 UTC m=+1127.503395908" lastFinishedPulling="2025-12-03 16:38:10.619200317 +0000 UTC m=+1187.538536740" observedRunningTime="2025-12-03 16:38:13.199564208 +0000 UTC m=+1190.118900641" watchObservedRunningTime="2025-12-03 16:38:13.200977333 +0000 UTC m=+1190.120313756" Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.205656 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" podStartSLOduration=2.43531967 podStartE2EDuration="1m4.205645191s" podCreationTimestamp="2025-12-03 16:37:09 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.590480787 +0000 UTC m=+1127.509817210" lastFinishedPulling="2025-12-03 16:38:12.360806308 +0000 UTC m=+1189.280142731" observedRunningTime="2025-12-03 16:38:13.185213895 +0000 UTC m=+1190.104550328" watchObservedRunningTime="2025-12-03 16:38:13.205645191 +0000 UTC m=+1190.124981614" Dec 03 16:38:13 crc kubenswrapper[4768]: I1203 16:38:13.224032 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" podStartSLOduration=5.235908541 podStartE2EDuration="1m5.224012175s" podCreationTimestamp="2025-12-03 16:37:08 +0000 UTC" firstStartedPulling="2025-12-03 16:37:10.628045036 +0000 UTC m=+1127.547381459" lastFinishedPulling="2025-12-03 16:38:10.61614867 +0000 UTC m=+1187.535485093" observedRunningTime="2025-12-03 16:38:13.219084191 +0000 UTC m=+1190.138420614" watchObservedRunningTime="2025-12-03 16:38:13.224012175 +0000 UTC m=+1190.143348598" Dec 03 16:38:19 crc kubenswrapper[4768]: I1203 16:38:19.332789 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-s6lpx" Dec 03 16:38:19 crc kubenswrapper[4768]: I1203 16:38:19.453847 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-n2qpd" Dec 03 16:38:19 crc kubenswrapper[4768]: I1203 16:38:19.542661 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5b6647b8f8-ztfkl" Dec 03 16:38:19 crc kubenswrapper[4768]: I1203 16:38:19.639259 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d2m42" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.680891 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.682446 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.687676 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.687885 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-5l8j2" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.688041 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.693246 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.708672 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.717440 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.717495 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqhvn\" (UniqueName: \"kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.752726 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.754867 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.756771 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.770975 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.819320 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lxqd\" (UniqueName: \"kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.819371 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.819416 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqhvn\" (UniqueName: \"kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.819458 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.819506 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.820330 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.836622 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqhvn\" (UniqueName: \"kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn\") pod \"dnsmasq-dns-675f4bcbfc-b8m4l\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.920890 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.921728 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8"
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.921850 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lxqd\" (UniqueName: \"kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8"
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.921925 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8"
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.922460 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8"
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.950340 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lxqd\" (UniqueName: \"kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd\") pod \"dnsmasq-dns-78dd6ddcc-z27c8\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8"
Dec 03 16:38:38 crc kubenswrapper[4768]: I1203 16:38:38.997589 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l"
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:39 crc kubenswrapper[4768]: I1203 16:38:39.445141 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:39 crc kubenswrapper[4768]: I1203 16:38:39.516220 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:39 crc kubenswrapper[4768]: W1203 16:38:39.518448 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c7ef903_af95_4d7e_b379_037c65c8fd4e.slice/crio-454b36d47ab327bb2c5ab39b6b7021025d47cf135466c8e3e0d32efc515eba1d WatchSource:0}: Error finding container 454b36d47ab327bb2c5ab39b6b7021025d47cf135466c8e3e0d32efc515eba1d: Status 404 returned error can't find the container with id 454b36d47ab327bb2c5ab39b6b7021025d47cf135466c8e3e0d32efc515eba1d Dec 03 16:38:40 crc kubenswrapper[4768]: I1203 16:38:40.402298 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" event={"ID":"b14e8136-cab7-4ee6-a42d-a08418aa617d","Type":"ContainerStarted","Data":"b1ca21932b4e4d8334391451dd563bc083d101288f720e154460e871071fe9ff"} Dec 03 16:38:40 crc kubenswrapper[4768]: I1203 16:38:40.404465 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" event={"ID":"1c7ef903-af95-4d7e-b379-037c65c8fd4e","Type":"ContainerStarted","Data":"454b36d47ab327bb2c5ab39b6b7021025d47cf135466c8e3e0d32efc515eba1d"} Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.475611 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.497796 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.499294 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.508755 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.558939 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.559284 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.559335 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzfzk\" (UniqueName: \"kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.660236 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzfzk\" (UniqueName: \"kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.660298 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.660358 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.661288 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.661337 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.700638 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzfzk\" (UniqueName: 
\"kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk\") pod \"dnsmasq-dns-666b6646f7-q2vl9\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.747945 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.800712 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.806457 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.806542 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.822740 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.865820 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.865869 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5vkj\" (UniqueName: \"kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.865904 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.967225 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.967655 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.967715 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5vkj\" (UniqueName: \"kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.968751 4768 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.968812 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:41 crc kubenswrapper[4768]: I1203 16:38:41.994090 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5vkj\" (UniqueName: \"kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj\") pod \"dnsmasq-dns-57d769cc4f-7rr8d\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.129270 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.330329 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"] Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.432363 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:38:42 crc kubenswrapper[4768]: W1203 16:38:42.436855 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e784157_56f0_4c49_a361_4ffc9edecf2b.slice/crio-309f2e9c3414b100cd3ba464bf90abf2509d4b14fbac182f7f5490d61a108c17 WatchSource:0}: Error finding container 309f2e9c3414b100cd3ba464bf90abf2509d4b14fbac182f7f5490d61a108c17: Status 404 returned error can't find the container with id 309f2e9c3414b100cd3ba464bf90abf2509d4b14fbac182f7f5490d61a108c17 Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.450028 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" event={"ID":"0e784157-56f0-4c49-a361-4ffc9edecf2b","Type":"ContainerStarted","Data":"309f2e9c3414b100cd3ba464bf90abf2509d4b14fbac182f7f5490d61a108c17"} Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.451446 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" event={"ID":"94979b00-3879-404c-82fa-98d29d8b0a2c","Type":"ContainerStarted","Data":"2a09af584efd69942ed79296e462d20a8bbeab80c427e7e9bfcb1f79008446c4"} Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.646806 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.648016 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.657918 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.658208 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.658344 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.658549 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.658690 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.658790 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-x2hkw" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.659200 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.672427 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784748 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgtjd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784804 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784835 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784868 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784896 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784913 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.784935 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.785311 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.785398 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.785417 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.785455 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886693 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886759 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886775 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886799 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf\") pod 
\"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886836 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgtjd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886854 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886878 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886907 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886929 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886942 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.886960 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.888282 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.888631 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.888712 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.889429 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.889456 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/34f8a2e8feba8e1195031a4c36493954fd2d6f8faf48f707af875a92cd7c2a85/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.889791 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.889818 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.891843 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.891915 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.892106 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.892715 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.913277 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgtjd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc 
kubenswrapper[4768]: I1203 16:38:42.913539 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.917076 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919081 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919227 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919333 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-54tnc" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919429 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919565 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.919726 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.920386 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.928379 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.929734 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:38:42 crc kubenswrapper[4768]: I1203 16:38:42.986828 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.088961 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089002 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089026 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089053 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089079 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089107 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089197 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089227 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089275 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089317 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np78t\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.089381 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192328 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192392 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192407 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192429 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192449 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np78t\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192497 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192531 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192549 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192566 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192609 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.192635 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.193544 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.194145 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.194272 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.194861 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.195831 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 
03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.198937 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.199163 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/989aecef5bc372eec88750a42cc238896432583c1ff4ea1eb5b415eecee9303d/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.200398 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.202141 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.205631 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.221578 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.222036 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np78t\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.234925 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:43 crc kubenswrapper[4768]: I1203 16:38:43.271171 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.431160 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.432416 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.436431 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-g55gx" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.436971 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.438516 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.438698 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.456039 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.457156 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617338 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617400 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617459 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617535 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617585 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617626 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617678 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dntns\" (UniqueName: \"kubernetes.io/projected/c3dc1084-f02c-45ff-87de-22a8818905b4-kube-api-access-dntns\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.617702 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.720062 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dntns\" (UniqueName: \"kubernetes.io/projected/c3dc1084-f02c-45ff-87de-22a8818905b4-kube-api-access-dntns\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.720103 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.720129 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.720150 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.720207 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.721368 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.721703 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-kolla-config\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722075 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-config-data-default\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722249 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722316 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722339 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722924 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.722953 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/83f54f921ad0954c799cd98059b6c13d08a0e0fb86e158e02f9e0b675ff616bc/globalmount\"" pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.724468 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c3dc1084-f02c-45ff-87de-22a8818905b4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.730834 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.731474 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3dc1084-f02c-45ff-87de-22a8818905b4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.743953 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dntns\" (UniqueName: \"kubernetes.io/projected/c3dc1084-f02c-45ff-87de-22a8818905b4-kube-api-access-dntns\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " 
pod="openstack/openstack-galera-0" Dec 03 16:38:44 crc kubenswrapper[4768]: I1203 16:38:44.792048 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ab922eb6-f475-425c-a3c6-9f389bdfabb1\") pod \"openstack-galera-0\" (UID: \"c3dc1084-f02c-45ff-87de-22a8818905b4\") " pod="openstack/openstack-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.061488 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.844730 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.846202 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.848995 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-lv2gq" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.851790 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.851984 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.852140 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.856579 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945156 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945238 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945291 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945337 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945362 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nzlc\" (UniqueName: \"kubernetes.io/projected/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kube-api-access-8nzlc\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945387 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945425 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:45 crc kubenswrapper[4768]: I1203 16:38:45.945466 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047307 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047351 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nzlc\" (UniqueName: \"kubernetes.io/projected/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kube-api-access-8nzlc\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047376 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047407 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047437 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 
16:38:46.047480 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047515 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.047549 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.048506 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.048852 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.048970 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.049320 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3f6d3b77-fbdf-4dfd-b473-3e8288367442-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.053092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.053627 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f6d3b77-fbdf-4dfd-b473-3e8288367442-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.072208 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice 
STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.072259 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/c3c1000124193de4b4476bb54cb44f5015f5f78693d99e754085c7da444cf70c/globalmount\"" pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.075763 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.076794 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.081704 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-vcb49" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.081921 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.082436 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.099288 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nzlc\" (UniqueName: \"kubernetes.io/projected/3f6d3b77-fbdf-4dfd-b473-3e8288367442-kube-api-access-8nzlc\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.121610 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.148590 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-config-data\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.148689 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.148717 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wdqqh\" (UniqueName: \"kubernetes.io/projected/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kube-api-access-wdqqh\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0" Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.148736 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0" Dec 03 16:38:46 crc 
kubenswrapper[4768]: I1203 16:38:46.148764 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kolla-config\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.156856 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b482bc3a-c1e2-433a-b14a-c4775014a853\") pod \"openstack-cell1-galera-0\" (UID: \"3f6d3b77-fbdf-4dfd-b473-3e8288367442\") " pod="openstack/openstack-cell1-galera-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.226075 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.251777 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wdqqh\" (UniqueName: \"kubernetes.io/projected/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kube-api-access-wdqqh\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.251820 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.251857 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kolla-config\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.251906 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-config-data\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.251958 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.252976 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kolla-config\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.257085 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c42c7f90-2ae1-4222-864c-b8f7f1733beb-config-data\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.261743 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-memcached-tls-certs\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.269510 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c42c7f90-2ae1-4222-864c-b8f7f1733beb-combined-ca-bundle\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.278187 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wdqqh\" (UniqueName: \"kubernetes.io/projected/c42c7f90-2ae1-4222-864c-b8f7f1733beb-kube-api-access-wdqqh\") pod \"memcached-0\" (UID: \"c42c7f90-2ae1-4222-864c-b8f7f1733beb\") " pod="openstack/memcached-0"
Dec 03 16:38:46 crc kubenswrapper[4768]: I1203 16:38:46.499224 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.729340 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.730404 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.734865 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-xrdfr"
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.743054 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.887122 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk5th\" (UniqueName: \"kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th\") pod \"kube-state-metrics-0\" (UID: \"f82fe5c1-e056-4fbb-bca3-7552b85daa9b\") " pod="openstack/kube-state-metrics-0"
Dec 03 16:38:47 crc kubenswrapper[4768]: I1203 16:38:47.988859 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk5th\" (UniqueName: \"kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th\") pod \"kube-state-metrics-0\" (UID: \"f82fe5c1-e056-4fbb-bca3-7552b85daa9b\") " pod="openstack/kube-state-metrics-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.006358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk5th\" (UniqueName: \"kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th\") pod \"kube-state-metrics-0\" (UID: \"f82fe5c1-e056-4fbb-bca3-7552b85daa9b\") " pod="openstack/kube-state-metrics-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.072349 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.295361 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.297012 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.298477 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.298985 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.299018 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.298997 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-x94gz"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.299879 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.322114 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394138 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394220 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394259 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394280 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2pwb\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-kube-api-access-j2pwb\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394299 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.394348 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495149 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495209 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495226 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2pwb\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-kube-api-access-j2pwb\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495245 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495276 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495293 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.495340 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.496508 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.498868 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.499347 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.499557 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.508989 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.523697 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2pwb\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-kube-api-access-j2pwb\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.525825 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f9cb5864-2bfb-49c0-8124-b11beb8fdad1-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f9cb5864-2bfb-49c0-8124-b11beb8fdad1\") " pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.616539 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.974162 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.976031 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.979099 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.979398 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.979449 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-mvn59"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.979627 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.980055 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.980245 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 03 16:38:48 crc kubenswrapper[4768]: I1203 16:38:48.996091 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135136 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135196 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvkvl\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135249 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135279 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135313 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135373 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135432 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.135475 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.236337 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.236673 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.236766 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvkvl\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.236862 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.236992 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.237100 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.237235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.237367 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.241469 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.241985 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.242172 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.242199 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bc62c60cce49c9fe1f0c4e1ad5989a7b6836177e41a5fab7867a183c10310e2f/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.242864 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.242956 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.243345 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.247317 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.277013 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvkvl\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.293818 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:49 crc kubenswrapper[4768]: I1203 16:38:49.336242 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.261800 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.263705 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.269156 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.269620 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-jvcxp"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.278335 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.278485 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.278713 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.278882 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414289 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414338 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414384 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414556 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414724 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.414868 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.415001 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf6vf\" (UniqueName: \"kubernetes.io/projected/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-kube-api-access-sf6vf\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.415076 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516149 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516215 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516249 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf6vf\" (UniqueName: \"kubernetes.io/projected/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-kube-api-access-sf6vf\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516284 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516312 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516329 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516358 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.516384 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.517701 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.518114 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-config\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.519672 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.521994 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.522696 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.522926 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.522956 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a54cf3d02cb5b2eabbe41fc5ac0d85fff460eb44f798e31a086d2b2dd01d031c/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.528442 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.560759 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-39c86e45-48a1-490f-ac15-404a5ecdc0b0\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.570279 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf6vf\" (UniqueName: \"kubernetes.io/projected/a0d45532-8a91-4fa5-a7b5-21fdcf44160e-kube-api-access-sf6vf\") pod \"ovsdbserver-nb-0\" (UID: \"a0d45532-8a91-4fa5-a7b5-21fdcf44160e\") " pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.591934 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.825655 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-6bhgk"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.826885 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.828911 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.831158 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.831398 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-zfnqg"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.834251 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-hjhg9"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.836285 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.853355 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hjhg9"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.860341 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6bhgk"]
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.923974 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-log\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924025 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-ovn-controller-tls-certs\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924045 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dc8z7\" (UniqueName: \"kubernetes.io/projected/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-kube-api-access-dc8z7\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924065 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924082 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-combined-ca-bundle\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924096 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-scripts\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924122 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9plk\" (UniqueName: \"kubernetes.io/projected/87225d49-4f3c-44e3-a05d-feee87a94114-kube-api-access-s9plk\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924158 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-lib\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924185 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87225d49-4f3c-44e3-a05d-feee87a94114-scripts\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924222 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-run\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924248 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924270 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-etc-ovs\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:53 crc kubenswrapper[4768]: I1203 16:38:53.924293 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-log-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025507 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025806 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-etc-ovs\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025831 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-log-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025853 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-log\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025879 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-ovn-controller-tls-certs\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025895 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dc8z7\" (UniqueName: \"kubernetes.io/projected/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-kube-api-access-dc8z7\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025916 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025931 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-combined-ca-bundle\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025950 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-scripts\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.025978 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9plk\" (UniqueName: \"kubernetes.io/projected/87225d49-4f3c-44e3-a05d-feee87a94114-kube-api-access-s9plk\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026018 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-lib\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026043 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87225d49-4f3c-44e3-a05d-feee87a94114-scripts\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026078 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-run\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026532 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-run\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026640 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026758 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-etc-ovs\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026862 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-lib\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026859 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-run\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.026975 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/87225d49-4f3c-44e3-a05d-feee87a94114-var-log-ovn\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.027107 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-var-log\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.028844 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-scripts\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.029129 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/87225d49-4f3c-44e3-a05d-feee87a94114-scripts\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.032339 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-ovn-controller-tls-certs\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.032487 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87225d49-4f3c-44e3-a05d-feee87a94114-combined-ca-bundle\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.046221 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dc8z7\" (UniqueName: \"kubernetes.io/projected/8e6b671c-fd9d-438c-9c0c-9db70f4a63ca-kube-api-access-dc8z7\") pod \"ovn-controller-ovs-hjhg9\" (UID: \"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca\") " pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.052860 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9plk\" (UniqueName: \"kubernetes.io/projected/87225d49-4f3c-44e3-a05d-feee87a94114-kube-api-access-s9plk\") pod \"ovn-controller-6bhgk\" (UID: \"87225d49-4f3c-44e3-a05d-feee87a94114\") " pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.149849 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6bhgk"
Dec 03 16:38:54 crc kubenswrapper[4768]: I1203 16:38:54.155102 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.083698 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.084986 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.088046 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.088475 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.088655 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.088919 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.090350 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-w975w"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.100827 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.159751 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.159835 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxbmq\" (UniqueName: \"kubernetes.io/projected/fb71d5cf-3561-4f62-a0c0-980ae81ab050-kube-api-access-sxbmq\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.159875 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.159926 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.159953 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.261297 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.261343 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxbmq\" (UniqueName: \"kubernetes.io/projected/fb71d5cf-3561-4f62-a0c0-980ae81ab050-kube-api-access-sxbmq\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.261367 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.261389 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.261417 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.262409 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.262447 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fb71d5cf-3561-4f62-a0c0-980ae81ab050-config\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.268419 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.269931 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.270977 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.272213 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/fb71d5cf-3561-4f62-a0c0-980ae81ab050-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.275886 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.275992 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.276144 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.285984 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxbmq\" (UniqueName: \"kubernetes.io/projected/fb71d5cf-3561-4f62-a0c0-980ae81ab050-kube-api-access-sxbmq\") pod \"cloudkitty-lokistack-distributor-56cd74f89f-x5fb7\" (UID: \"fb71d5cf-3561-4f62-a0c0-980ae81ab050\") " pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.295351 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363488 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363566 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-config\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363616 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363639 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363694 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9lpj\" (UniqueName: \"kubernetes.io/projected/5bbec9d6-615c-4007-b056-19ead8728139-kube-api-access-p9lpj\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.363740 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.426294 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.427467 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.431255 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.431782 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.448194 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.456353 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"]
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.465859 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.465908 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.465946 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"
Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.465974 4768 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwmxd\" (UniqueName: \"kubernetes.io/projected/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-kube-api-access-fwmxd\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.465993 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-config\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466024 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466044 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466069 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466104 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9lpj\" (UniqueName: \"kubernetes.io/projected/5bbec9d6-615c-4007-b056-19ead8728139-kube-api-access-p9lpj\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466123 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.466153 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " 
pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.467441 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-config\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.468033 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.483571 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.490346 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.497345 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/5bbec9d6-615c-4007-b056-19ead8728139-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.503136 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9lpj\" (UniqueName: \"kubernetes.io/projected/5bbec9d6-615c-4007-b056-19ead8728139-kube-api-access-p9lpj\") pod \"cloudkitty-lokistack-querier-548665d79b-qvtkc\" (UID: \"5bbec9d6-615c-4007-b056-19ead8728139\") " pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.568019 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.568111 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " 
pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.568163 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwmxd\" (UniqueName: \"kubernetes.io/projected/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-kube-api-access-fwmxd\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.568218 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.568287 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.570913 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.571907 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-config\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.579245 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.580281 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.611586 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwmxd\" (UniqueName: 
\"kubernetes.io/projected/7fed8740-2999-4b8f-bd2a-2bdfea8f03a5-kube-api-access-fwmxd\") pod \"cloudkitty-lokistack-query-frontend-779849886d-xc5w8\" (UID: \"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5\") " pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.647968 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.708992 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd"] Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.714603 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.726946 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.727174 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.727195 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.727462 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.727930 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.728075 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.731195 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q"] Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.732419 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.745052 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-9ctsf" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.760928 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.763266 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd"] Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.774989 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kt28\" (UniqueName: \"kubernetes.io/projected/b9983072-bd22-4145-a740-6f479db8e8fd-kube-api-access-9kt28\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775042 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775065 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775086 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775101 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775118 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775209 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brwhj\" (UniqueName: \"kubernetes.io/projected/83f9f0ed-f17f-4e94-bcc7-5108489ea003-kube-api-access-brwhj\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775227 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775257 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775279 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775311 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775339 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775357 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775388 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775414 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " 
pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775443 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775466 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.775672 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.777083 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q"] Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.876771 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.876830 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.876863 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877035 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877087 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kt28\" (UniqueName: \"kubernetes.io/projected/b9983072-bd22-4145-a740-6f479db8e8fd-kube-api-access-9kt28\") pod 
\"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877131 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877164 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877191 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877207 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877221 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877248 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brwhj\" (UniqueName: \"kubernetes.io/projected/83f9f0ed-f17f-4e94-bcc7-5108489ea003-kube-api-access-brwhj\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877269 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877293 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: 
\"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877325 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877389 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.877438 4768 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.877499 4768 configmap.go:193] Couldn't get configMap openstack/cloudkitty-lokistack-gateway-ca-bundle: configmap "cloudkitty-lokistack-gateway-ca-bundle" not found Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877829 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877886 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.877444 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.878179 4768 secret.go:188] Couldn't get secret openstack/cloudkitty-lokistack-gateway-http: secret "cloudkitty-lokistack-gateway-http" not found Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.877500 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret podName:83f9f0ed-f17f-4e94-bcc7-5108489ea003 nodeName:}" failed. No retries permitted until 2025-12-03 16:38:57.377484272 +0000 UTC m=+1234.296820695 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret") pod "cloudkitty-lokistack-gateway-76cc998948-55lmd" (UID: "83f9f0ed-f17f-4e94-bcc7-5108489ea003") : secret "cloudkitty-lokistack-gateway-http" not found Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.878229 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.878248 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle podName:83f9f0ed-f17f-4e94-bcc7-5108489ea003 nodeName:}" failed. No retries permitted until 2025-12-03 16:38:57.378231642 +0000 UTC m=+1234.297568065 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cloudkitty-lokistack-gateway-ca-bundle" (UniqueName: "kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle") pod "cloudkitty-lokistack-gateway-76cc998948-55lmd" (UID: "83f9f0ed-f17f-4e94-bcc7-5108489ea003") : configmap "cloudkitty-lokistack-gateway-ca-bundle" not found Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.878274 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.878331 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.878414 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret podName:b9983072-bd22-4145-a740-6f479db8e8fd nodeName:}" failed. No retries permitted until 2025-12-03 16:38:57.378399496 +0000 UTC m=+1234.297735919 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "tls-secret" (UniqueName: "kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret") pod "cloudkitty-lokistack-gateway-76cc998948-jhn5q" (UID: "b9983072-bd22-4145-a740-6f479db8e8fd") : secret "cloudkitty-lokistack-gateway-http" not found Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.878896 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.879004 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.879120 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-rbac\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.879571 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.879629 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.881101 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.881511 4768 configmap.go:193] Couldn't get configMap openstack/cloudkitty-lokistack-gateway-ca-bundle: configmap "cloudkitty-lokistack-gateway-ca-bundle" not found Dec 03 16:38:56 crc kubenswrapper[4768]: E1203 16:38:56.881581 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle podName:b9983072-bd22-4145-a740-6f479db8e8fd nodeName:}" failed. No retries permitted until 2025-12-03 16:38:57.381563581 +0000 UTC m=+1234.300900004 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cloudkitty-lokistack-gateway-ca-bundle" (UniqueName: "kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle") pod "cloudkitty-lokistack-gateway-76cc998948-jhn5q" (UID: "b9983072-bd22-4145-a740-6f479db8e8fd") : configmap "cloudkitty-lokistack-gateway-ca-bundle" not found Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.881739 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.883352 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tenants\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.884979 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.893035 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brwhj\" (UniqueName: \"kubernetes.io/projected/83f9f0ed-f17f-4e94-bcc7-5108489ea003-kube-api-access-brwhj\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:56 crc kubenswrapper[4768]: I1203 16:38:56.896877 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kt28\" (UniqueName: \"kubernetes.io/projected/b9983072-bd22-4145-a740-6f479db8e8fd-kube-api-access-9kt28\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.079437 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.081483 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.083645 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.083650 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-j92wf" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.083806 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.084411 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.087937 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184001 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184063 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184178 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fshql\" (UniqueName: \"kubernetes.io/projected/cc4d3013-515d-4eb0-a20e-735bcdbed9db-kube-api-access-fshql\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184222 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184271 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184301 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-config\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdb-rundir\") pod 
\"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.184368 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.238819 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.239037 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rqhvn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-b8m4l_openstack(b14e8136-cab7-4ee6-a42d-a08418aa617d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.240432 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" podUID="b14e8136-cab7-4ee6-a42d-a08418aa617d" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.285861 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.285923 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.285985 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286001 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fshql\" (UniqueName: \"kubernetes.io/projected/cc4d3013-515d-4eb0-a20e-735bcdbed9db-kube-api-access-fshql\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286176 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286230 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286265 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-config\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286295 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286339 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.286782 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.287317 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-config\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.287409 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4d3013-515d-4eb0-a20e-735bcdbed9db-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.291959 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.292140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.292210 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.292255 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4d3013-515d-4eb0-a20e-735bcdbed9db-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.298447 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.298730 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.298773 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.298769 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b7a44a4a44756339fdba98073cf6c3c101cc73d9ca231b357ccf5d80e0a446e9/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.302152 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.313225 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fshql\" (UniqueName: \"kubernetes.io/projected/cc4d3013-515d-4eb0-a20e-735bcdbed9db-kube-api-access-fshql\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.322781 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.322986 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6lxqd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-z27c8_openstack(1c7ef903-af95-4d7e-b379-037c65c8fd4e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:38:57 crc kubenswrapper[4768]: E1203 16:38:57.324476 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" podUID="1c7ef903-af95-4d7e-b379-037c65c8fd4e" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.354727 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.355966 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.358661 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.358884 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.364825 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-18d4e6be-d322-4b21-ad0a-65fd485600f7\") pod \"ovsdbserver-sb-0\" (UID: \"cc4d3013-515d-4eb0-a20e-735bcdbed9db\") " pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388564 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388629 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388756 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388812 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6j6f\" (UniqueName: \"kubernetes.io/projected/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-kube-api-access-j6j6f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388863 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388892 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388925 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.388978 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389017 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389077 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389099 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389129 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389164 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389282 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389347 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84qmr\" (UniqueName: 
\"kubernetes.io/projected/08387864-260c-4260-bf37-e878d9207c7d-kube-api-access-84qmr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389374 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389509 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389568 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.389680 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.390551 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.391358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9983072-bd22-4145-a740-6f479db8e8fd-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.392358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/83f9f0ed-f17f-4e94-bcc7-5108489ea003-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.399811 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b9983072-bd22-4145-a740-6f479db8e8fd-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-jhn5q\" (UID: \"b9983072-bd22-4145-a740-6f479db8e8fd\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.400230 4768 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/83f9f0ed-f17f-4e94-bcc7-5108489ea003-tls-secret\") pod \"cloudkitty-lokistack-gateway-76cc998948-55lmd\" (UID: \"83f9f0ed-f17f-4e94-bcc7-5108489ea003\") " pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.405997 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491358 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491415 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491456 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491474 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491496 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6j6f\" (UniqueName: \"kubernetes.io/projected/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-kube-api-access-j6j6f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491517 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491535 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491559 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: 
\"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491582 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491627 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491644 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491664 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491733 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84qmr\" (UniqueName: \"kubernetes.io/projected/08387864-260c-4260-bf37-e878d9207c7d-kube-api-access-84qmr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491760 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.491788 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.492940 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc 
kubenswrapper[4768]: I1203 16:38:57.493168 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.494659 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.494765 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.494920 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.495060 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.499560 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.500018 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.502956 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08387864-260c-4260-bf37-e878d9207c7d-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.505176 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: 
\"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.507393 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.508147 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.508768 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/08387864-260c-4260-bf37-e878d9207c7d-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.514680 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84qmr\" (UniqueName: \"kubernetes.io/projected/08387864-260c-4260-bf37-e878d9207c7d-kube-api-access-84qmr\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.516851 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6j6f\" (UniqueName: \"kubernetes.io/projected/5c0ad451-c513-4f94-ac08-aaa2c7df9ae8-kube-api-access-j6j6f\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.579211 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.583082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"08387864-260c-4260-bf37-e878d9207c7d\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.595762 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.614481 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.616857 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.623533 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.624382 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.639898 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.660568 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.662491 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.682180 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.687303 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.782855 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811025 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811399 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811643 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811792 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811822 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k2h4\" (UniqueName: 
\"kubernetes.io/projected/8cec597c-1827-4712-b016-5c7cfc55c585-kube-api-access-7k2h4\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811937 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.811992 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.913536 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916002 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916094 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916113 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916150 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916226 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: 
\"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.916247 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k2h4\" (UniqueName: \"kubernetes.io/projected/8cec597c-1827-4712-b016-5c7cfc55c585-kube-api-access-7k2h4\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.914704 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.917158 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.924911 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.925207 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.932299 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8cec597c-1827-4712-b016-5c7cfc55c585-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.933460 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cec597c-1827-4712-b016-5c7cfc55c585-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.967028 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k2h4\" (UniqueName: \"kubernetes.io/projected/8cec597c-1827-4712-b016-5c7cfc55c585-kube-api-access-7k2h4\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:57 crc kubenswrapper[4768]: I1203 16:38:57.988685 4768 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.000837 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8cec597c-1827-4712-b016-5c7cfc55c585\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:58 crc kubenswrapper[4768]: W1203 16:38:58.178449 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b8fcf68_a566_4dc2_9137_4b1e85eede0f.slice/crio-0778e7b2e0a6e3eec1d43a83f1021c0760c71db6b1355b61b4334497658d853a WatchSource:0}: Error finding container 0778e7b2e0a6e3eec1d43a83f1021c0760c71db6b1355b61b4334497658d853a: Status 404 returned error can't find the container with id 0778e7b2e0a6e3eec1d43a83f1021c0760c71db6b1355b61b4334497658d853a Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.248066 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.406771 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.502800 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.525204 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lxqd\" (UniqueName: \"kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd\") pod \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.525319 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc\") pod \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.525499 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config\") pod \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\" (UID: \"1c7ef903-af95-4d7e-b379-037c65c8fd4e\") " Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.526764 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config" (OuterVolumeSpecName: "config") pod "1c7ef903-af95-4d7e-b379-037c65c8fd4e" (UID: "1c7ef903-af95-4d7e-b379-037c65c8fd4e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.534309 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1c7ef903-af95-4d7e-b379-037c65c8fd4e" (UID: "1c7ef903-af95-4d7e-b379-037c65c8fd4e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.541684 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.549113 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd" (OuterVolumeSpecName: "kube-api-access-6lxqd") pod "1c7ef903-af95-4d7e-b379-037c65c8fd4e" (UID: "1c7ef903-af95-4d7e-b379-037c65c8fd4e"). InnerVolumeSpecName "kube-api-access-6lxqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.551692 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.590208 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.642619 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lxqd\" (UniqueName: \"kubernetes.io/projected/1c7ef903-af95-4d7e-b379-037c65c8fd4e-kube-api-access-6lxqd\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.642657 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.642667 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c7ef903-af95-4d7e-b379-037c65c8fd4e-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.654675 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.662797 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.666866 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f9cb5864-2bfb-49c0-8124-b11beb8fdad1","Type":"ContainerStarted","Data":"057bbcd2a5fc599b027cc8c859cdb4a5ec67eef4e4bce639b26f7094ed86e0fe"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.668721 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.670512 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" event={"ID":"b14e8136-cab7-4ee6-a42d-a08418aa617d","Type":"ContainerDied","Data":"b1ca21932b4e4d8334391451dd563bc083d101288f720e154460e871071fe9ff"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.670647 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-b8m4l" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.672543 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" event={"ID":"1c7ef903-af95-4d7e-b379-037c65c8fd4e","Type":"ContainerDied","Data":"454b36d47ab327bb2c5ab39b6b7021025d47cf135466c8e3e0d32efc515eba1d"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.672680 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-z27c8" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.678927 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerStarted","Data":"0778e7b2e0a6e3eec1d43a83f1021c0760c71db6b1355b61b4334497658d853a"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.683339 4768 generic.go:334] "Generic (PLEG): container finished" podID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerID="afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478" exitCode=0 Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.683419 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" event={"ID":"94979b00-3879-404c-82fa-98d29d8b0a2c","Type":"ContainerDied","Data":"afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.686380 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerStarted","Data":"3a5f67b1eb54af382abc3a1a669207f11d0e9e8bb7f6da38e614b011aa8f3849"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.688551 4768 generic.go:334] "Generic (PLEG): container finished" podID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerID="2a7a84b7c943f8e380ea92a23f7ec63e51705f408613d4be8503b1c97a9ea703" exitCode=0 Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.688693 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" event={"ID":"0e784157-56f0-4c49-a361-4ffc9edecf2b","Type":"ContainerDied","Data":"2a7a84b7c943f8e380ea92a23f7ec63e51705f408613d4be8503b1c97a9ea703"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.690171 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f82fe5c1-e056-4fbb-bca3-7552b85daa9b","Type":"ContainerStarted","Data":"e2b99f1e92bacf0abbe5568fdab1e4e9962c8fc65b2999ea8a616808350f0d43"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.692022 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3dc1084-f02c-45ff-87de-22a8818905b4","Type":"ContainerStarted","Data":"f4dbcb414b088a7a92ec4c4f013b57ae000761dc7f9fbc7310ff9ccd784bc9c6"} Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.743565 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config\") pod \"b14e8136-cab7-4ee6-a42d-a08418aa617d\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.743632 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqhvn\" (UniqueName: \"kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn\") pod \"b14e8136-cab7-4ee6-a42d-a08418aa617d\" (UID: \"b14e8136-cab7-4ee6-a42d-a08418aa617d\") " Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.745851 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config" (OuterVolumeSpecName: "config") pod "b14e8136-cab7-4ee6-a42d-a08418aa617d" (UID: "b14e8136-cab7-4ee6-a42d-a08418aa617d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.750887 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn" (OuterVolumeSpecName: "kube-api-access-rqhvn") pod "b14e8136-cab7-4ee6-a42d-a08418aa617d" (UID: "b14e8136-cab7-4ee6-a42d-a08418aa617d"). InnerVolumeSpecName "kube-api-access-rqhvn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.760982 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.768341 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-z27c8"] Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.847474 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14e8136-cab7-4ee6-a42d-a08418aa617d-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:58 crc kubenswrapper[4768]: I1203 16:38:58.847497 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqhvn\" (UniqueName: \"kubernetes.io/projected/b14e8136-cab7-4ee6-a42d-a08418aa617d-kube-api-access-rqhvn\") on node \"crc\" DevicePath \"\"" Dec 03 16:38:58 crc kubenswrapper[4768]: E1203 16:38:58.903273 4768 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 03 16:38:58 crc kubenswrapper[4768]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/94979b00-3879-404c-82fa-98d29d8b0a2c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 16:38:58 crc kubenswrapper[4768]: > podSandboxID="2a09af584efd69942ed79296e462d20a8bbeab80c427e7e9bfcb1f79008446c4" Dec 03 16:38:58 crc kubenswrapper[4768]: E1203 16:38:58.903426 4768 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 16:38:58 crc kubenswrapper[4768]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pzfzk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-q2vl9_openstack(94979b00-3879-404c-82fa-98d29d8b0a2c): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/94979b00-3879-404c-82fa-98d29d8b0a2c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 03 16:38:58 crc kubenswrapper[4768]: > logger="UnhandledError" Dec 03 16:38:58 crc kubenswrapper[4768]: E1203 16:38:58.907934 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/94979b00-3879-404c-82fa-98d29d8b0a2c/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.033844 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.041281 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-b8m4l"] Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.264843 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6bhgk"] Dec 03 16:38:59 crc 
kubenswrapper[4768]: I1203 16:38:59.274676 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.281218 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.338628 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"]
Dec 03 16:38:59 crc kubenswrapper[4768]: W1203 16:38:59.345689 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9983072_bd22_4145_a740_6f479db8e8fd.slice/crio-533a47935ab7008991abd5e54d3235f88f7ff7a91df196e39effdc9866bc5c69 WatchSource:0}: Error finding container 533a47935ab7008991abd5e54d3235f88f7ff7a91df196e39effdc9866bc5c69: Status 404 returned error can't find the container with id 533a47935ab7008991abd5e54d3235f88f7ff7a91df196e39effdc9866bc5c69
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.362620 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.393007 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.434209 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.448669 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.455426 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.461661 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"]
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.466301 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.476033 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:gateway,Image:registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150,Command:[],Args:[--debug.name=lokistack-gateway --web.listen=0.0.0.0:8080 --web.internal.listen=0.0.0.0:8081 --web.healthchecks.url=https://localhost:8080 --log.level=warn --logs.read.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.tail.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.write.endpoint=https://cloudkitty-lokistack-distributor-http.openstack.svc.cluster.local:3100 --logs.write-timeout=4m0s --rbac.config=/etc/lokistack-gateway/rbac.yaml --tenants.config=/etc/lokistack-gateway/tenants.yaml --server.read-timeout=48s --server.write-timeout=6m0s --tls.min-version=VersionTLS12 --tls.server.cert-file=/var/run/tls/http/server/tls.crt --tls.server.key-file=/var/run/tls/http/server/tls.key --tls.healthchecks.server-ca-file=/var/run/ca/server/service-ca.crt --tls.healthchecks.server-name=cloudkitty-lokistack-gateway-http.openstack.svc.cluster.local
--tls.internal.server.cert-file=/var/run/tls/http/server/tls.crt --tls.internal.server.key-file=/var/run/tls/http/server/tls.key --tls.min-version=VersionTLS12 --tls.cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 --logs.tls.ca-file=/var/run/ca/upstream/service-ca.crt --logs.tls.cert-file=/var/run/tls/http/upstream/tls.crt --logs.tls.key-file=/var/run/tls/http/upstream/tls.key --tls.client-auth-type=RequestClientCert],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},ContainerPort{Name:public,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:rbac,ReadOnly:true,MountPath:/etc/lokistack-gateway/rbac.yaml,SubPath:rbac.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tenants,ReadOnly:true,MountPath:/etc/lokistack-gateway/tenants.yaml,SubPath:tenants.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:lokistack-gateway,ReadOnly:true,MountPath:/etc/lokistack-gateway/lokistack-gateway.rego,SubPath:lokistack-gateway.rego,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-secret,ReadOnly:true,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-client-http,ReadOnly:true,MountPath:/var/run/tls/http/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-ca-bundle,ReadOnly:false,MountPath:/var/run/tenants-ca/cloudkitty,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-brwhj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/live,Port:{0 8081 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 8081 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:12,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-gateway-76cc998948-55lmd_openstack(83f9f0ed-f17f-4e94-bcc7-5108489ea003): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.477655 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" podUID="83f9f0ed-f17f-4e94-bcc7-5108489ea003" Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.504861 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-hjhg9"] Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.559264 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c7ef903-af95-4d7e-b379-037c65c8fd4e" path="/var/lib/kubelet/pods/1c7ef903-af95-4d7e-b379-037c65c8fd4e/volumes" Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.559829 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b14e8136-cab7-4ee6-a42d-a08418aa617d" path="/var/lib/kubelet/pods/b14e8136-cab7-4ee6-a42d-a08418aa617d/volumes" Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.651805 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n666h5d9h54fh5dch88h97h64ch558h6chbbh57fh64fh586hddh78h8bh6bh5d5h56dhffh58fh64dh5b9h55dh5ch546h56h5f4h59bh5dh9chf4q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fshql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovsdbserver-sb-0_openstack(cc4d3013-515d-4eb0-a20e-735bcdbed9db): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 03 16:38:59 crc kubenswrapper[4768]: W1203 16:38:59.653868 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e6b671c_fd9d_438c_9c0c_9db70f4a63ca.slice/crio-23d99a08b07d148dc196d444e3543f709c5961b0cdf0d298cb3aa2ffaeccb9c3 WatchSource:0}: Error finding container 23d99a08b07d148dc196d444e3543f709c5961b0cdf0d298cb3aa2ffaeccb9c3: Status 404 returned error can't find the container with id 23d99a08b07d148dc196d444e3543f709c5961b0cdf0d298cb3aa2ffaeccb9c3
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.655118 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:openstack-network-exporter,Image:quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified,Command:[/app/openstack-network-exporter],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:OPENSTACK_NETWORK_EXPORTER_YAML,Value:/etc/config/openstack-network-exporter.yaml,ValueFrom:nil,},EnvVar{Name:CONFIG_HASH,Value:n666h5d9h54fh5dch88h97h64ch558h6chbbh57fh64fh586hddh78h8bh6bh5d5h56dhffh58fh64dh5b9h55dh5ch546h56h5f4h59bh5dh9chf4q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovnmetrics.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovnmetrics.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:metrics-certs-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fshql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(cc4d3013-515d-4eb0-a20e-735bcdbed9db): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.657060 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack/ovsdbserver-sb-0" podUID="cc4d3013-515d-4eb0-a20e-735bcdbed9db"
Dec 03
16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.659267 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d5h99h589h66dh58dh675h5f8h558h549hc4h55h684h86h5bfh65fhf9h668h549h7bh5cbh5bbh645hf4h5cbh54h5f9h65ch5c9h595h67h685h696q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dc8z7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-hjhg9_openstack(8e6b671c-fd9d-438c-9c0c-9db70f4a63ca): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.660742 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/ovn-controller-ovs-hjhg9" podUID="8e6b671c-fd9d-438c-9c0c-9db70f4a63ca" Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.702811 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8","Type":"ContainerStarted","Data":"27bbc932128db472032034abd899bae751a257fbbb43feaba337e3ab9d362712"} Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.704509 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a0d45532-8a91-4fa5-a7b5-21fdcf44160e","Type":"ContainerStarted","Data":"77eb3c4decb06066be7d79f7f209790f88790f169730bfd83db675242a84b63a"} Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.707944 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6bhgk" 
event={"ID":"87225d49-4f3c-44e3-a05d-feee87a94114","Type":"ContainerStarted","Data":"cdbf4d1e8ef630d67c089d90f6133080938d48323d27bf6d5db4d1b1afe55e16"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.710987 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerStarted","Data":"00994b8e4b32c45bd7ae44aac5a5d4df0e2bf34968ecb0ee9ea2624b0394ec6c"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.712144 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hjhg9" event={"ID":"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca","Type":"ContainerStarted","Data":"23d99a08b07d148dc196d444e3543f709c5961b0cdf0d298cb3aa2ffaeccb9c3"}
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.713907 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-hjhg9" podUID="8e6b671c-fd9d-438c-9c0c-9db70f4a63ca"
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.716434 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" event={"ID":"83f9f0ed-f17f-4e94-bcc7-5108489ea003","Type":"ContainerStarted","Data":"e6e2caa69210d9b33b5cf26b7537885c064589cac4e7923ef3430eb39cb1d4ef"}
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.719673 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150\\\"\"" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" podUID="83f9f0ed-f17f-4e94-bcc7-5108489ea003"
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.722449 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" event={"ID":"5bbec9d6-615c-4007-b056-19ead8728139","Type":"ContainerStarted","Data":"9fa4f6867762f74aced186724be11aeed00189dcfef12acd9a4deba9c06909d3"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.725116 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" event={"ID":"b9983072-bd22-4145-a740-6f479db8e8fd","Type":"ContainerStarted","Data":"533a47935ab7008991abd5e54d3235f88f7ff7a91df196e39effdc9866bc5c69"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.727974 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c42c7f90-2ae1-4222-864c-b8f7f1733beb","Type":"ContainerStarted","Data":"00d77fafc85c03513d3794cf402de8776408dae8ebe7af7c7ec28766d8eb8bab"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.731340 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"8cec597c-1827-4712-b016-5c7cfc55c585","Type":"ContainerStarted","Data":"832a36944c221044c0ef8e4a9db537ee621c576f428499f85d1d25cffd593ca7"}
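--- [editor's note, not part of the captured log] ---
The ErrImagePull "pull QPS exceeded" failures earlier in this window (ovsdbserver-sb-0, the lokistack gateway, ovn-controller-ovs) are not registry errors: kubelet throttles its own image pulls with a client-side token bucket (KubeletConfiguration registryPullQPS / registryBurst, which default to 5 QPS with a burst of 10), and a pull that finds no token available fails immediately with that message, after which the pod drops into ImagePullBackOff and retries. A minimal Python sketch of that behavior, assuming the default limits and the roughly fifteen pods that became startable at once in the SyncLoop UPDATE burst above; the PullThrottle class and the pod count are illustrative, not taken from kubelet source:

    import time

    class PullThrottle:
        """Non-blocking token bucket, kubelet-style: refuse rather than wait."""
        def __init__(self, qps=5.0, burst=10):
            self.qps, self.burst = qps, burst
            self.tokens = float(burst)
            self.last = time.monotonic()

        def try_accept(self):
            now = time.monotonic()
            # refill tokens for the elapsed time, capped at the burst size
            self.tokens = min(self.burst, self.tokens + (now - self.last) * self.qps)
            self.last = now
            if self.tokens >= 1.0:
                self.tokens -= 1.0
                return True
            return False  # kubelet surfaces this as ErrImagePull "pull QPS exceeded"

    throttle = PullThrottle()
    # ~15 pods become startable at essentially the same instant:
    results = [throttle.try_accept() for _ in range(15)]
    print(results.count(True), "pulls admitted,", results.count(False), "rejected")
    # -> 10 pulls admitted, 5 rejected (the rejected pods back off and retry)

Consistent with that model, the rejected pods above reappear with ImagePullBackOff entries and are admitted piecemeal on later retries as tokens refill.
--- [end note] ---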
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.734161 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" event={"ID":"0e784157-56f0-4c49-a361-4ffc9edecf2b","Type":"ContainerStarted","Data":"ea6663d65d89340cda6cfd9a42f4d1ca76ca166cc987cde54c76105a8d4e3ab0"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.735013 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d"
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.736504 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3f6d3b77-fbdf-4dfd-b473-3e8288367442","Type":"ContainerStarted","Data":"0fb426c6b732d67fe86baa46cca429b9fbce49f3a6be0d24f76cf94e88240452"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.737930 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cc4d3013-515d-4eb0-a20e-735bcdbed9db","Type":"ContainerStarted","Data":"c899e7e5504d15a2726f415bca5c52c605b8b3894bf08c8dd8a6f2c9a750e929"}
Dec 03 16:38:59 crc kubenswrapper[4768]: E1203 16:38:59.739570 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"]" pod="openstack/ovsdbserver-sb-0" podUID="cc4d3013-515d-4eb0-a20e-735bcdbed9db"
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.740503 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" event={"ID":"fb71d5cf-3561-4f62-a0c0-980ae81ab050","Type":"ContainerStarted","Data":"f902618fa15943958146ab6329551659eba1b3bf515905f57304ef9ab574bb52"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.744923 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"08387864-260c-4260-bf37-e878d9207c7d","Type":"ContainerStarted","Data":"eec048cc6479bc1794d138aefabe753e2cf8148e37ea9f20c0a206d0244e6dec"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.749119 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" event={"ID":"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5","Type":"ContainerStarted","Data":"44eb904150771dfa758e0074d66594191a194aa88f2ee337492c93cda9c880da"}
Dec 03 16:38:59 crc kubenswrapper[4768]: I1203 16:38:59.789976 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" podStartSLOduration=3.452421289 podStartE2EDuration="18.789952879s" podCreationTimestamp="2025-12-03 16:38:41 +0000 UTC" firstStartedPulling="2025-12-03 16:38:42.438420891 +0000 UTC m=+1219.357757314" lastFinishedPulling="2025-12-03 16:38:57.775952491 +0000 UTC m=+1234.695288904" observedRunningTime="2025-12-03 16:38:59.78737942 +0000 UTC m=+1236.706715843" watchObservedRunningTime="2025-12-03 16:38:59.789952879 +0000 UTC m=+1236.709289312"
Dec 03 16:39:00 crc kubenswrapper[4768]: E1203 16:39:00.759488 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-hjhg9" podUID="8e6b671c-fd9d-438c-9c0c-9db70f4a63ca"
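--- [editor's note, not part of the captured log] ---
The pod_startup_latency_tracker entry above carries its own arithmetic: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration matches that end-to-end time minus the image-pull window (firstStartedPulling to lastFinishedPulling). A quick check on the logged numbers for dnsmasq-dns-57d769cc4f-7rr8d, reproducing both values exactly; plain arithmetic on the timestamps quoted above, with variable names of my choosing:

    # end-to-end startup: observedRunningTime 16:38:59.789952879 minus creation 16:38:41 (wall clock)
    e2e = (16*3600 + 38*60 + 59.789952879) - (16*3600 + 38*60 + 41)
    # image-pull window from the monotonic m=+ offsets, which carry full nanosecond precision:
    pulling = 1234.695288904 - 1219.357757314   # lastFinishedPulling - firstStartedPulling
    slo = e2e - pulling
    print(f"e2e={e2e:.9f}s  pulling={pulling:.9f}s  slo={slo:.9f}s")
    # -> e2e=18.789952879s  pulling=15.337531590s  slo=3.452421289s, matching the entry above

So of the 18.8 s this pod took to reach Running, 15.3 s was spent pulling its image, and the tracker reports only the remaining 3.45 s against the startup SLO.
--- [end note] ---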
Dec 03 16:39:00 crc kubenswrapper[4768]: E1203 16:39:00.760067 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150\\\"\"" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" podUID="83f9f0ed-f17f-4e94-bcc7-5108489ea003"
Dec 03 16:39:00 crc kubenswrapper[4768]: E1203 16:39:00.765237 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\", failed to \"StartContainer\" for \"openstack-network-exporter\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-network-exporter:current-podified\\\"\"]" pod="openstack/ovsdbserver-sb-0" podUID="cc4d3013-515d-4eb0-a20e-735bcdbed9db"
Dec 03 16:39:07 crc kubenswrapper[4768]: I1203 16:39:07.131821 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d"
Dec 03 16:39:07 crc kubenswrapper[4768]: I1203 16:39:07.215449 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"]
Dec 03 16:39:11 crc kubenswrapper[4768]: E1203 16:39:11.504068 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified"
Dec 03 16:39:11 crc kubenswrapper[4768]: E1203 16:39:11.504557 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init --
/usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n8ch65ch65bhd9h54fh58h694h547h57ch56fhf6h6h696h68bh67fh65fh56ch66dhcbh658h67dh685h65h56h655h585h64dh545hbdh67h678h56dq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wdqqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(c42c7f90-2ae1-4222-864c-b8f7f1733beb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:39:11 crc kubenswrapper[4768]: E1203 16:39:11.505872 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="c42c7f90-2ae1-4222-864c-b8f7f1733beb" Dec 03 16:39:11 crc kubenswrapper[4768]: E1203 16:39:11.856666 4768 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="c42c7f90-2ae1-4222-864c-b8f7f1733beb" Dec 03 16:39:12 crc kubenswrapper[4768]: E1203 16:39:12.339853 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62" Dec 03 16:39:12 crc kubenswrapper[4768]: E1203 16:39:12.340109 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init-config-reloader,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62,Command:[/bin/prometheus-config-reloader],Args:[--watch-interval=0 --listen-address=:8081 --config-file=/etc/alertmanager/config/alertmanager.yaml.gz --config-envsubst-file=/etc/alertmanager/config_out/alertmanager.env.yaml --watched-dir=/etc/alertmanager/config],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:reloader-init,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:SHARD,Value:-1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-volume,ReadOnly:true,MountPath:/etc/alertmanager/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-out,ReadOnly:false,MountPath:/etc/alertmanager/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:web-config,ReadOnly:true,MountPath:/etc/alertmanager/web_config/web-config.yaml,SubPath:web-config.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j2pwb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod alertmanager-metric-storage-0_openstack(f9cb5864-2bfb-49c0-8124-b11beb8fdad1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:12 crc 
kubenswrapper[4768]: E1203 16:39:12.341763 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/alertmanager-metric-storage-0" podUID="f9cb5864-2bfb-49c0-8124-b11beb8fdad1" Dec 03 16:39:12 crc kubenswrapper[4768]: E1203 16:39:12.865984 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init-config-reloader\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:1133c973c7472c665f910a722e19c8e2e27accb34b90fab67f14548627ce9c62\\\"\"" pod="openstack/alertmanager-metric-storage-0" podUID="f9cb5864-2bfb-49c0-8124-b11beb8fdad1" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.659974 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.660970 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8nzlc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(3f6d3b77-fbdf-4dfd-b473-3e8288367442): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:39:19 crc 
kubenswrapper[4768]: E1203 16:39:19.662325 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="3f6d3b77-fbdf-4dfd-b473-3e8288367442" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.926423 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="3f6d3b77-fbdf-4dfd-b473-3e8288367442" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.974911 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.975100 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dntns,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(c3dc1084-f02c-45ff-87de-22a8818905b4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:39:19 crc kubenswrapper[4768]: E1203 16:39:19.976347 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: 
\"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="c3dc1084-f02c-45ff-87de-22a8818905b4" Dec 03 16:39:20 crc kubenswrapper[4768]: E1203 16:39:20.740164 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51" Dec 03 16:39:20 crc kubenswrapper[4768]: E1203 16:39:20.741060 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-index-gateway,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=index-gateway -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-index-gateway-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-index-gateway-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7k2h4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-index-gateway-0_openstack(8cec597c-1827-4712-b016-5c7cfc55c585): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 03 16:39:20 crc kubenswrapper[4768]: E1203 16:39:20.742523 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-index-gateway\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" podUID="8cec597c-1827-4712-b016-5c7cfc55c585"
Dec 03 16:39:20 crc kubenswrapper[4768]: E1203 16:39:20.935928 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-index-gateway\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" podUID="8cec597c-1827-4712-b016-5c7cfc55c585"
Dec 03 16:39:20 crc kubenswrapper[4768]: E1203 16:39:20.935963 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="c3dc1084-f02c-45ff-87de-22a8818905b4"
Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.541420 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150"
Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.541688 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:gateway,Image:registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150,Command:[],Args:[--debug.name=lokistack-gateway --web.listen=0.0.0.0:8080 --web.internal.listen=0.0.0.0:8081 --web.healthchecks.url=https://localhost:8080 --log.level=warn --logs.read.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.tail.endpoint=https://cloudkitty-lokistack-query-frontend-http.openstack.svc.cluster.local:3100 --logs.write.endpoint=https://cloudkitty-lokistack-distributor-http.openstack.svc.cluster.local:3100 --logs.write-timeout=4m0s --rbac.config=/etc/lokistack-gateway/rbac.yaml
--tenants.config=/etc/lokistack-gateway/tenants.yaml --server.read-timeout=48s --server.write-timeout=6m0s --tls.min-version=VersionTLS12 --tls.server.cert-file=/var/run/tls/http/server/tls.crt --tls.server.key-file=/var/run/tls/http/server/tls.key --tls.healthchecks.server-ca-file=/var/run/ca/server/service-ca.crt --tls.healthchecks.server-name=cloudkitty-lokistack-gateway-http.openstack.svc.cluster.local --tls.internal.server.cert-file=/var/run/tls/http/server/tls.crt --tls.internal.server.key-file=/var/run/tls/http/server/tls.key --tls.min-version=VersionTLS12 --tls.cipher-suites=TLS_AES_128_GCM_SHA256,TLS_AES_256_GCM_SHA384,TLS_CHACHA20_POLY1305_SHA256,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 --logs.tls.ca-file=/var/run/ca/upstream/service-ca.crt --logs.tls.cert-file=/var/run/tls/http/upstream/tls.crt --logs.tls.key-file=/var/run/tls/http/upstream/tls.key --tls.client-auth-type=RequestClientCert],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},ContainerPort{Name:public,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:rbac,ReadOnly:true,MountPath:/etc/lokistack-gateway/rbac.yaml,SubPath:rbac.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tenants,ReadOnly:true,MountPath:/etc/lokistack-gateway/tenants.yaml,SubPath:tenants.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:lokistack-gateway,ReadOnly:true,MountPath:/etc/lokistack-gateway/lokistack-gateway.rego,SubPath:lokistack-gateway.rego,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-secret,ReadOnly:true,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-client-http,ReadOnly:true,MountPath:/var/run/tls/http/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/upstream,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-gateway-ca-bundle,ReadOnly:true,MountPath:/var/run/ca/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-ca-bundle,ReadOnly:false,MountPath:/var/run/tenants-ca/cloudkitty,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9kt28,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/live,Port:{0 8081 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 8081 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:1,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:12,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-gateway-76cc998948-jhn5q_openstack(b9983072-bd22-4145-a740-6f479db8e8fd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.543489 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" podUID="b9983072-bd22-4145-a740-6f479db8e8fd" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.685195 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.685356 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-distributor,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=distributor -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml 
-config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sxbmq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-distributor-56cd74f89f-x5fb7_openstack(fb71d5cf-3561-4f62-a0c0-980ae81ab050): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.686633 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" podUID="fb71d5cf-3561-4f62-a0c0-980ae81ab050" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.943074 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" podUID="fb71d5cf-3561-4f62-a0c0-980ae81ab050" Dec 03 16:39:21 crc kubenswrapper[4768]: E1203 16:39:21.943843 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"gateway\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/lokistack-gateway-rhel9@sha256:710a1a5e486de5724469e55f29e9ff3f6cbef8cd4b2d21dfe254ede2b953c150\\\"\"" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" podUID="b9983072-bd22-4145-a740-6f479db8e8fd" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.080547 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.081170 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-nb,Image:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f8h6fh88hdch6bh568h557h654h675h5dfhch58h67fh67bh5dh7bh5dhd9h9h8ch559h5bfh676h5b9h668h5b8h5c9h5cfh559h5ddh647h549q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-nb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sf6vf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(a0d45532-8a91-4fa5-a7b5-21fdcf44160e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.372529 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.372923 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-query-frontend,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=query-frontend -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml 
-config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fwmxd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-query-frontend-779849886d-xc5w8_openstack(7fed8740-2999-4b8f-bd2a-2bdfea8f03a5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.374311 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" podUID="7fed8740-2999-4b8f-bd2a-2bdfea8f03a5" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.597409 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.597708 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-ingester,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51,Command:[],Args:[-target=ingester -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:wal,ReadOnly:false,MountPath:/tmp/wal,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ingester-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ingester-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j6j6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-ingester-0_openstack(5c0ad451-c513-4f94-ac08-aaa2c7df9ae8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.599255 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-ingester\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5c0ad451-c513-4f94-ac08-aaa2c7df9ae8" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.988224 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.989313 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5d5h99h589h66dh58dh675h5f8h558h549hc4h55h684h86h5bfh65fhf9h668h549h7bh5cbh5bbh645hf4h5cbh54h5f9h65ch5c9h595h67h685h696q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s9plk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-6bhgk_openstack(87225d49-4f3c-44e3-a05d-feee87a94114): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:39:22 crc kubenswrapper[4768]: E1203 16:39:22.990942 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-6bhgk" podUID="87225d49-4f3c-44e3-a05d-feee87a94114" Dec 03 16:39:23 crc kubenswrapper[4768]: E1203 16:39:23.193265 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-ingester\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5c0ad451-c513-4f94-ac08-aaa2c7df9ae8" Dec 03 16:39:23 crc kubenswrapper[4768]: E1203 16:39:23.194898 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:14f37195a4957e3848690d0ffe5422be55f7599b30dfe1ee0f97eb1118a10a51\\\"\"" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" podUID="7fed8740-2999-4b8f-bd2a-2bdfea8f03a5" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.041287 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified\\\"\"" pod="openstack/ovn-controller-6bhgk" podUID="87225d49-4f3c-44e3-a05d-feee87a94114" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.827493 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.827569 4768 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.827794 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wk5th,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(f82fe5c1-e056-4fbb-bca3-7552b85daa9b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.829094 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" Dec 03 16:39:24 crc kubenswrapper[4768]: E1203 16:39:24.974957 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" Dec 03 16:39:26 crc kubenswrapper[4768]: I1203 16:39:26.992249 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" event={"ID":"94979b00-3879-404c-82fa-98d29d8b0a2c","Type":"ContainerStarted","Data":"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092"} Dec 03 16:39:26 crc kubenswrapper[4768]: I1203 16:39:26.992981 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="dnsmasq-dns" containerID="cri-o://748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092" gracePeriod=10 Dec 03 16:39:26 crc kubenswrapper[4768]: I1203 16:39:26.993221 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.020644 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" podStartSLOduration=30.887291206 podStartE2EDuration="46.02062161s" podCreationTimestamp="2025-12-03 16:38:41 +0000 UTC" firstStartedPulling="2025-12-03 16:38:42.343548355 +0000 UTC m=+1219.262884778" lastFinishedPulling="2025-12-03 16:38:57.476878759 +0000 UTC m=+1234.396215182" observedRunningTime="2025-12-03 16:39:27.01540494 +0000 UTC m=+1263.934741373" watchObservedRunningTime="2025-12-03 16:39:27.02062161 +0000 UTC m=+1263.939958033" Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.688109 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.828699 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc\") pod \"94979b00-3879-404c-82fa-98d29d8b0a2c\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.828949 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzfzk\" (UniqueName: \"kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk\") pod \"94979b00-3879-404c-82fa-98d29d8b0a2c\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.828986 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config\") pod \"94979b00-3879-404c-82fa-98d29d8b0a2c\" (UID: \"94979b00-3879-404c-82fa-98d29d8b0a2c\") " Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.846867 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk" (OuterVolumeSpecName: "kube-api-access-pzfzk") pod "94979b00-3879-404c-82fa-98d29d8b0a2c" (UID: "94979b00-3879-404c-82fa-98d29d8b0a2c"). InnerVolumeSpecName "kube-api-access-pzfzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:39:27 crc kubenswrapper[4768]: I1203 16:39:27.932063 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzfzk\" (UniqueName: \"kubernetes.io/projected/94979b00-3879-404c-82fa-98d29d8b0a2c-kube-api-access-pzfzk\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:27 crc kubenswrapper[4768]: E1203 16:39:27.978248 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="a0d45532-8a91-4fa5-a7b5-21fdcf44160e" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.002736 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" event={"ID":"83f9f0ed-f17f-4e94-bcc7-5108489ea003","Type":"ContainerStarted","Data":"d059f59716836c704729ed6f998337b6c39fb0f038496f8728ef4eb8e0803078"} Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.003424 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.005469 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" event={"ID":"5bbec9d6-615c-4007-b056-19ead8728139","Type":"ContainerStarted","Data":"218af84b6234eee998cd725cae30099f00537985808583e8997a29a7c5cba31e"} Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.005624 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.007050 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" 
event={"ID":"08387864-260c-4260-bf37-e878d9207c7d","Type":"ContainerStarted","Data":"a76850c341a3a0a541c990938e881fed12716d9ec2b31b074cc9573e54d72d45"} Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.007172 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.008761 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a0d45532-8a91-4fa5-a7b5-21fdcf44160e","Type":"ContainerStarted","Data":"68b96d368e0c9d03eefec47b661c3903e84eef318c9dadb32c83b0ab68c2449b"} Dec 03 16:39:28 crc kubenswrapper[4768]: E1203 16:39:28.010409 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="a0d45532-8a91-4fa5-a7b5-21fdcf44160e" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.011204 4768 generic.go:334] "Generic (PLEG): container finished" podID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerID="748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092" exitCode=0 Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.011237 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.011252 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" event={"ID":"94979b00-3879-404c-82fa-98d29d8b0a2c","Type":"ContainerDied","Data":"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092"} Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.011559 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-q2vl9" event={"ID":"94979b00-3879-404c-82fa-98d29d8b0a2c","Type":"ContainerDied","Data":"2a09af584efd69942ed79296e462d20a8bbeab80c427e7e9bfcb1f79008446c4"} Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.011587 4768 scope.go:117] "RemoveContainer" containerID="748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.018171 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.028109 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-55lmd" podStartSLOduration=7.550465683 podStartE2EDuration="32.028089217s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.475873885 +0000 UTC m=+1236.395210308" lastFinishedPulling="2025-12-03 16:39:23.953497409 +0000 UTC m=+1260.872833842" observedRunningTime="2025-12-03 16:39:28.019948818 +0000 UTC m=+1264.939285261" watchObservedRunningTime="2025-12-03 16:39:28.028089217 +0000 UTC m=+1264.947425650" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.078979 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=6.725008224 podStartE2EDuration="32.078961073s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.406397359 +0000 UTC m=+1236.325733782" lastFinishedPulling="2025-12-03 
16:39:24.760350208 +0000 UTC m=+1261.679686631" observedRunningTime="2025-12-03 16:39:28.078370817 +0000 UTC m=+1264.997707240" watchObservedRunningTime="2025-12-03 16:39:28.078961073 +0000 UTC m=+1264.998297496" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.129248 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" podStartSLOduration=5.290516939 podStartE2EDuration="32.129223982s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.389061614 +0000 UTC m=+1236.308398027" lastFinishedPulling="2025-12-03 16:39:26.227768647 +0000 UTC m=+1263.147105070" observedRunningTime="2025-12-03 16:39:28.120877528 +0000 UTC m=+1265.040213971" watchObservedRunningTime="2025-12-03 16:39:28.129223982 +0000 UTC m=+1265.048560405" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.682474 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config" (OuterVolumeSpecName: "config") pod "94979b00-3879-404c-82fa-98d29d8b0a2c" (UID: "94979b00-3879-404c-82fa-98d29d8b0a2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.745107 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.791834 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94979b00-3879-404c-82fa-98d29d8b0a2c" (UID: "94979b00-3879-404c-82fa-98d29d8b0a2c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:28 crc kubenswrapper[4768]: I1203 16:39:28.846659 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94979b00-3879-404c-82fa-98d29d8b0a2c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.026538 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerStarted","Data":"a8e17f753020d4487160b9ae587a0c00bfee7bf215ce333c439e75659175f6e7"} Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.029901 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hjhg9" event={"ID":"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca","Type":"ContainerStarted","Data":"f9580e7b8ba236506643e944d5da01889301f1c9fb3fcd30569399791c28321a"} Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.032925 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cc4d3013-515d-4eb0-a20e-735bcdbed9db","Type":"ContainerStarted","Data":"e40accb02db429123af209fef7f396b02585476f673dc4c4ffa06ebf7914e64a"} Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.035424 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"c42c7f90-2ae1-4222-864c-b8f7f1733beb","Type":"ContainerStarted","Data":"b7dbc5f90e4758d601d5084c64666e70e65faed72395b91eaf04d83f2b50dbb0"} Dec 03 16:39:29 crc kubenswrapper[4768]: E1203 16:39:29.039883 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-nb-0" podUID="a0d45532-8a91-4fa5-a7b5-21fdcf44160e" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.137935 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=14.898258494 podStartE2EDuration="43.137915062s" podCreationTimestamp="2025-12-03 16:38:46 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.678373617 +0000 UTC m=+1235.597710040" lastFinishedPulling="2025-12-03 16:39:26.918030175 +0000 UTC m=+1263.837366608" observedRunningTime="2025-12-03 16:39:29.129692531 +0000 UTC m=+1266.049028964" watchObservedRunningTime="2025-12-03 16:39:29.137915062 +0000 UTC m=+1266.057251485" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.228987 4768 scope.go:117] "RemoveContainer" containerID="afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.251475 4768 scope.go:117] "RemoveContainer" containerID="748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092" Dec 03 16:39:29 crc kubenswrapper[4768]: E1203 16:39:29.253018 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092\": container with ID starting with 748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092 not found: ID does not exist" containerID="748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.253055 4768 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092"} err="failed to get container status \"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092\": rpc error: code = NotFound desc = could not find container \"748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092\": container with ID starting with 748ed7b05f6eb52fbbb9a103e0f15832a53dba2b9a13f69b6811f32919871092 not found: ID does not exist" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.253077 4768 scope.go:117] "RemoveContainer" containerID="afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478" Dec 03 16:39:29 crc kubenswrapper[4768]: E1203 16:39:29.253413 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478\": container with ID starting with afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478 not found: ID does not exist" containerID="afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.253555 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478"} err="failed to get container status \"afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478\": rpc error: code = NotFound desc = could not find container \"afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478\": container with ID starting with afbb4b0665f6995356b8fedc1156a6d0d02ac81cad620e030c8db909b99f7478 not found: ID does not exist" Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.261554 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"] Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.270165 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-q2vl9"] Dec 03 16:39:29 crc kubenswrapper[4768]: I1203 16:39:29.547342 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" path="/var/lib/kubelet/pods/94979b00-3879-404c-82fa-98d29d8b0a2c/volumes" Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.049985 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cc4d3013-515d-4eb0-a20e-735bcdbed9db","Type":"ContainerStarted","Data":"9aca3ae170f786573734f8b4350d9eb1198fd900ad04cb64582c1cc36202cd57"} Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.052320 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerStarted","Data":"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"} Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.054283 4768 generic.go:334] "Generic (PLEG): container finished" podID="8e6b671c-fd9d-438c-9c0c-9db70f4a63ca" containerID="f9580e7b8ba236506643e944d5da01889301f1c9fb3fcd30569399791c28321a" exitCode=0 Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.054371 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hjhg9" event={"ID":"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca","Type":"ContainerDied","Data":"f9580e7b8ba236506643e944d5da01889301f1c9fb3fcd30569399791c28321a"} Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.059502 4768 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f9cb5864-2bfb-49c0-8124-b11beb8fdad1","Type":"ContainerStarted","Data":"235964695f32c05ead7a64d02d986e3774e0cb1e6738ae1961a83b1ca681aeaa"} Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.065636 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerStarted","Data":"294cadb0d1f3f85d9f524a4aefc23a1ebc5763fc2780bdaeac0caaf1e2d2aa87"} Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.117226 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=7.539939869 podStartE2EDuration="34.117200612s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.651318156 +0000 UTC m=+1236.570654579" lastFinishedPulling="2025-12-03 16:39:26.228578899 +0000 UTC m=+1263.147915322" observedRunningTime="2025-12-03 16:39:30.083958909 +0000 UTC m=+1267.003295372" watchObservedRunningTime="2025-12-03 16:39:30.117200612 +0000 UTC m=+1267.036537075" Dec 03 16:39:30 crc kubenswrapper[4768]: I1203 16:39:30.407277 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.075476 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hjhg9" event={"ID":"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca","Type":"ContainerStarted","Data":"e0c7dd962a4c1b8f1d767183d631b9510f2899eb84f661d55207b9c384fd7454"} Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.075943 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-hjhg9" event={"ID":"8e6b671c-fd9d-438c-9c0c-9db70f4a63ca","Type":"ContainerStarted","Data":"7af7a7db6745cd5588626dee6c2e16ce61785eed44adcc1ec1f15a7aa0df9d26"} Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.076451 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hjhg9" Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.076482 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-hjhg9" Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.101993 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-hjhg9" podStartSLOduration=13.807576076 podStartE2EDuration="38.10197185s" podCreationTimestamp="2025-12-03 16:38:53 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.659104205 +0000 UTC m=+1236.578440628" lastFinishedPulling="2025-12-03 16:39:23.953499979 +0000 UTC m=+1260.872836402" observedRunningTime="2025-12-03 16:39:31.094538121 +0000 UTC m=+1268.013874544" watchObservedRunningTime="2025-12-03 16:39:31.10197185 +0000 UTC m=+1268.021308273" Dec 03 16:39:31 crc kubenswrapper[4768]: I1203 16:39:31.500367 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 03 16:39:32 crc kubenswrapper[4768]: I1203 16:39:32.406665 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 03 16:39:33 crc kubenswrapper[4768]: I1203 16:39:33.443335 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.118018 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/openstack-cell1-galera-0" event={"ID":"3f6d3b77-fbdf-4dfd-b473-3e8288367442","Type":"ContainerStarted","Data":"76e6a96fd53f3a504e129b9b2e1abe8b723da28ba2e99e00023ec0ae0fa34f80"} Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.167108 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.459660 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:34 crc kubenswrapper[4768]: E1203 16:39:34.460821 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="init" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.460841 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="init" Dec 03 16:39:34 crc kubenswrapper[4768]: E1203 16:39:34.460861 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="dnsmasq-dns" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.460869 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="dnsmasq-dns" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.461179 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="94979b00-3879-404c-82fa-98d29d8b0a2c" containerName="dnsmasq-dns" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.470826 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.474982 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.475129 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.475239 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsgjr\" (UniqueName: \"kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.475379 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.483278 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.498452 4768 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.523030 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-l8llw"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.524239 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.526485 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.550365 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l8llw"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.577133 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.577183 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.577237 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsgjr\" (UniqueName: \"kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.577300 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.578359 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.579254 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.580172 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.606283 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-qsgjr\" (UniqueName: \"kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr\") pod \"dnsmasq-dns-7f896c8c65-lflqb\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678656 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovs-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678801 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678818 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trvw9\" (UniqueName: \"kubernetes.io/projected/ce67993a-adfa-412b-9c3c-37c6bb25f007-kube-api-access-trvw9\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678852 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-combined-ca-bundle\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678926 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce67993a-adfa-412b-9c3c-37c6bb25f007-config\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.678991 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovn-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.707838 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.708882 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.726584 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.728184 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.732304 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.738921 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780298 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovs-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780430 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trvw9\" (UniqueName: \"kubernetes.io/projected/ce67993a-adfa-412b-9c3c-37c6bb25f007-kube-api-access-trvw9\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780466 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-combined-ca-bundle\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780522 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce67993a-adfa-412b-9c3c-37c6bb25f007-config\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780583 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovn-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.780730 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovn-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.781033 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/ce67993a-adfa-412b-9c3c-37c6bb25f007-ovs-rundir\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.781419 4768 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce67993a-adfa-412b-9c3c-37c6bb25f007-config\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.789828 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-combined-ca-bundle\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.789901 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce67993a-adfa-412b-9c3c-37c6bb25f007-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.798623 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trvw9\" (UniqueName: \"kubernetes.io/projected/ce67993a-adfa-412b-9c3c-37c6bb25f007-kube-api-access-trvw9\") pod \"ovn-controller-metrics-l8llw\" (UID: \"ce67993a-adfa-412b-9c3c-37c6bb25f007\") " pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.852555 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-l8llw" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.881884 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.881938 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.881974 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.882016 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r72h4\" (UniqueName: \"kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.882056 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc\") pod 
\"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.983290 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.983434 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.983466 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.983523 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.984196 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.984387 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.984523 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.984589 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r72h4\" (UniqueName: \"kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:34 crc kubenswrapper[4768]: I1203 16:39:34.984795 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 
03 16:39:35 crc kubenswrapper[4768]: I1203 16:39:35.008452 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r72h4\" (UniqueName: \"kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4\") pod \"dnsmasq-dns-86db49b7ff-sdrjd\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:35 crc kubenswrapper[4768]: I1203 16:39:35.109431 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:35 crc kubenswrapper[4768]: I1203 16:39:35.214992 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:35 crc kubenswrapper[4768]: W1203 16:39:35.221834 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bedd37e_deda_4ea2_88f5_bcb3b01433fc.slice/crio-a232eaa9b5fa3eb3fb988f9fd50783df059afd856f6b41d79788c7a4a6b398e8 WatchSource:0}: Error finding container a232eaa9b5fa3eb3fb988f9fd50783df059afd856f6b41d79788c7a4a6b398e8: Status 404 returned error can't find the container with id a232eaa9b5fa3eb3fb988f9fd50783df059afd856f6b41d79788c7a4a6b398e8 Dec 03 16:39:35 crc kubenswrapper[4768]: I1203 16:39:35.310459 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-l8llw"] Dec 03 16:39:35 crc kubenswrapper[4768]: I1203 16:39:35.552129 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.139356 4768 generic.go:334] "Generic (PLEG): container finished" podID="4bedd37e-deda-4ea2-88f5-bcb3b01433fc" containerID="d3ce3c92e505bc5e152d1919cb393f1464ff4a676333d288d6b943ab018a3659" exitCode=0 Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.139704 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" event={"ID":"4bedd37e-deda-4ea2-88f5-bcb3b01433fc","Type":"ContainerDied","Data":"d3ce3c92e505bc5e152d1919cb393f1464ff4a676333d288d6b943ab018a3659"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.139729 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" event={"ID":"4bedd37e-deda-4ea2-88f5-bcb3b01433fc","Type":"ContainerStarted","Data":"a232eaa9b5fa3eb3fb988f9fd50783df059afd856f6b41d79788c7a4a6b398e8"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.150656 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l8llw" event={"ID":"ce67993a-adfa-412b-9c3c-37c6bb25f007","Type":"ContainerStarted","Data":"37f4a76f1ce9b739f108a08310c96e6cef59706f8705f90937a9a03a01fb0815"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.150693 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-l8llw" event={"ID":"ce67993a-adfa-412b-9c3c-37c6bb25f007","Type":"ContainerStarted","Data":"84c7188975c20ca78181ec7e3571f1193f83e724baed218c21ea120606f09407"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.159925 4768 generic.go:334] "Generic (PLEG): container finished" podID="9323f350-627d-4270-a0f4-a74e35261cc3" containerID="8d99cad7b59c8235d2b4fd37cbd082cf637e8f8c0ad462acf4a658800fafece0" exitCode=0 Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.160001 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" 
event={"ID":"9323f350-627d-4270-a0f4-a74e35261cc3","Type":"ContainerDied","Data":"8d99cad7b59c8235d2b4fd37cbd082cf637e8f8c0ad462acf4a658800fafece0"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.160027 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" event={"ID":"9323f350-627d-4270-a0f4-a74e35261cc3","Type":"ContainerStarted","Data":"08c3a0b23365843df5e831c0ae3fcedfef113b49a112788700026593948f075f"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.164845 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"5c0ad451-c513-4f94-ac08-aaa2c7df9ae8","Type":"ContainerStarted","Data":"3926567e1f62661fce616ddd72483d490fb8811b04d93153d30941be50acf52b"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.165051 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.167096 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"8cec597c-1827-4712-b016-5c7cfc55c585","Type":"ContainerStarted","Data":"c4e45a73baa82bf6ee183c9f507d5669bd725dde78cd3c39e12f313f84e771aa"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.168108 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.169850 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" event={"ID":"b9983072-bd22-4145-a740-6f479db8e8fd","Type":"ContainerStarted","Data":"db0132ed1b97ed77f13911f3fafff52c760662a3a9309a0ba43082a2e739ff35"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.170923 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.178930 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3dc1084-f02c-45ff-87de-22a8818905b4","Type":"ContainerStarted","Data":"7c9603f9d24efc6b3404655b33beca690cdea546c24da984ce2f60f730fa65a3"} Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.212568 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.219893 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-l8llw" podStartSLOduration=2.219872799 podStartE2EDuration="2.219872799s" podCreationTimestamp="2025-12-03 16:39:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:36.174625134 +0000 UTC m=+1273.093961577" watchObservedRunningTime="2025-12-03 16:39:36.219872799 +0000 UTC m=+1273.139209222" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.238407 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=-9223371996.616392 podStartE2EDuration="40.238384246s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.365017528 +0000 UTC m=+1236.284353951" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 
16:39:36.199318927 +0000 UTC m=+1273.118655370" watchObservedRunningTime="2025-12-03 16:39:36.238384246 +0000 UTC m=+1273.157720679" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.288158 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-76cc998948-jhn5q" podStartSLOduration=-9223371996.56664 podStartE2EDuration="40.288134272s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.404292383 +0000 UTC m=+1236.323628806" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:36.226534488 +0000 UTC m=+1273.145870921" watchObservedRunningTime="2025-12-03 16:39:36.288134272 +0000 UTC m=+1273.207470705" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.311849 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=-9223371996.542946 podStartE2EDuration="40.311830819s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.394003386 +0000 UTC m=+1236.313339809" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:36.294136033 +0000 UTC m=+1273.213472456" watchObservedRunningTime="2025-12-03 16:39:36.311830819 +0000 UTC m=+1273.231167242" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.501064 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.851295 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.938731 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc\") pod \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.938809 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb\") pod \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.938880 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config\") pod \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.939047 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsgjr\" (UniqueName: \"kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr\") pod \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\" (UID: \"4bedd37e-deda-4ea2-88f5-bcb3b01433fc\") " Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.961154 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr" (OuterVolumeSpecName: "kube-api-access-qsgjr") pod "4bedd37e-deda-4ea2-88f5-bcb3b01433fc" (UID: "4bedd37e-deda-4ea2-88f5-bcb3b01433fc"). InnerVolumeSpecName "kube-api-access-qsgjr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.971716 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4bedd37e-deda-4ea2-88f5-bcb3b01433fc" (UID: "4bedd37e-deda-4ea2-88f5-bcb3b01433fc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.986241 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config" (OuterVolumeSpecName: "config") pod "4bedd37e-deda-4ea2-88f5-bcb3b01433fc" (UID: "4bedd37e-deda-4ea2-88f5-bcb3b01433fc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:36 crc kubenswrapper[4768]: I1203 16:39:36.989277 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4bedd37e-deda-4ea2-88f5-bcb3b01433fc" (UID: "4bedd37e-deda-4ea2-88f5-bcb3b01433fc"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.042781 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsgjr\" (UniqueName: \"kubernetes.io/projected/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-kube-api-access-qsgjr\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.042821 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.042831 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.042839 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bedd37e-deda-4ea2-88f5-bcb3b01433fc-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.189866 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" event={"ID":"9323f350-627d-4270-a0f4-a74e35261cc3","Type":"ContainerStarted","Data":"83282c89f9382e0c745df5314aecc0e6484d89e9aae67f5ef5aece60cf77fd61"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.190563 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.192099 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" event={"ID":"7fed8740-2999-4b8f-bd2a-2bdfea8f03a5","Type":"ContainerStarted","Data":"e3f6c1a36c5fd47c54f77831ccb6e623337f730befd26ef5854375b7077cdbc0"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.192568 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.193863 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"f82fe5c1-e056-4fbb-bca3-7552b85daa9b","Type":"ContainerStarted","Data":"345c34cf99875fbe860f80abf60dcb72da8f284ebef0c7ec0242374b1ff2786f"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.194148 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.195424 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6bhgk" event={"ID":"87225d49-4f3c-44e3-a05d-feee87a94114","Type":"ContainerStarted","Data":"10bb1f67ecb79e94cabc942e913f2c862766578a78c4d8dc97d71f9d93673ab7"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.195586 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-6bhgk" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.196764 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.196764 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-lflqb" event={"ID":"4bedd37e-deda-4ea2-88f5-bcb3b01433fc","Type":"ContainerDied","Data":"a232eaa9b5fa3eb3fb988f9fd50783df059afd856f6b41d79788c7a4a6b398e8"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.196885 4768 scope.go:117] "RemoveContainer" containerID="d3ce3c92e505bc5e152d1919cb393f1464ff4a676333d288d6b943ab018a3659" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.198201 4768 generic.go:334] "Generic (PLEG): container finished" podID="f9cb5864-2bfb-49c0-8124-b11beb8fdad1" containerID="235964695f32c05ead7a64d02d986e3774e0cb1e6738ae1961a83b1ca681aeaa" exitCode=0 Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.198760 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f9cb5864-2bfb-49c0-8124-b11beb8fdad1","Type":"ContainerDied","Data":"235964695f32c05ead7a64d02d986e3774e0cb1e6738ae1961a83b1ca681aeaa"} Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.242137 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" podStartSLOduration=3.242120193 podStartE2EDuration="3.242120193s" podCreationTimestamp="2025-12-03 16:39:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:37.235047533 +0000 UTC m=+1274.154383966" watchObservedRunningTime="2025-12-03 16:39:37.242120193 +0000 UTC m=+1274.161456616" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.304838 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" podStartSLOduration=-9223371995.549953 podStartE2EDuration="41.304821667s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.364697989 +0000 UTC m=+1236.284034412" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:37.268052249 +0000 UTC m=+1274.187388672" watchObservedRunningTime="2025-12-03 16:39:37.304821667 +0000 UTC m=+1274.224158090" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.346149 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=12.812187347 podStartE2EDuration="50.346129316s" 
podCreationTimestamp="2025-12-03 16:38:47 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.58129366 +0000 UTC m=+1235.500630073" lastFinishedPulling="2025-12-03 16:39:36.115235619 +0000 UTC m=+1273.034572042" observedRunningTime="2025-12-03 16:39:37.33025564 +0000 UTC m=+1274.249592063" watchObservedRunningTime="2025-12-03 16:39:37.346129316 +0000 UTC m=+1274.265465739" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.346493 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-6bhgk" podStartSLOduration=7.5959330959999996 podStartE2EDuration="44.346488786s" podCreationTimestamp="2025-12-03 16:38:53 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.367571606 +0000 UTC m=+1236.286908029" lastFinishedPulling="2025-12-03 16:39:36.118127296 +0000 UTC m=+1273.037463719" observedRunningTime="2025-12-03 16:39:37.345387386 +0000 UTC m=+1274.264723809" watchObservedRunningTime="2025-12-03 16:39:37.346488786 +0000 UTC m=+1274.265825209" Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.394744 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.404073 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-lflqb"] Dec 03 16:39:37 crc kubenswrapper[4768]: I1203 16:39:37.542290 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bedd37e-deda-4ea2-88f5-bcb3b01433fc" path="/var/lib/kubelet/pods/4bedd37e-deda-4ea2-88f5-bcb3b01433fc/volumes" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.065433 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.113451 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"] Dec 03 16:39:38 crc kubenswrapper[4768]: E1203 16:39:38.114027 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bedd37e-deda-4ea2-88f5-bcb3b01433fc" containerName="init" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.114046 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bedd37e-deda-4ea2-88f5-bcb3b01433fc" containerName="init" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.114414 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bedd37e-deda-4ea2-88f5-bcb3b01433fc" containerName="init" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.116014 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.169356 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"] Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.180050 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgbtr\" (UniqueName: \"kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.180156 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.180178 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.180198 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.180237 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.208041 4768 generic.go:334] "Generic (PLEG): container finished" podID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerID="cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc" exitCode=0 Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.208291 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerDied","Data":"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"} Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.212553 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" event={"ID":"fb71d5cf-3561-4f62-a0c0-980ae81ab050","Type":"ContainerStarted","Data":"61194fec1b3f23743bdca21297fb306650c4c056f03fa2bac69ca240f416ec1b"} Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.212954 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.250569 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" podStartSLOduration=-9223371994.604223 podStartE2EDuration="42.250553326s" podCreationTimestamp="2025-12-03 16:38:56 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.640130415 +0000 UTC m=+1236.559466838" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:38.248721746 +0000 UTC m=+1275.168058169" watchObservedRunningTime="2025-12-03 16:39:38.250553326 +0000 UTC m=+1275.169889739" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.281877 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.281945 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.282027 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.282195 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgbtr\" (UniqueName: \"kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.283101 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.283106 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.283587 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.284139 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 
03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.284990 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.301815 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgbtr\" (UniqueName: \"kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr\") pod \"dnsmasq-dns-698758b865-vlllk\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") " pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.478832 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:38 crc kubenswrapper[4768]: I1203 16:39:38.924877 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"] Dec 03 16:39:38 crc kubenswrapper[4768]: W1203 16:39:38.931397 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec1d296a_9fca_4e6d_8463_fc948d519080.slice/crio-f0d7e2a21b256ef2a962d2d0a1d6d9a2275b897fa892f318b49962da02982ef6 WatchSource:0}: Error finding container f0d7e2a21b256ef2a962d2d0a1d6d9a2275b897fa892f318b49962da02982ef6: Status 404 returned error can't find the container with id f0d7e2a21b256ef2a962d2d0a1d6d9a2275b897fa892f318b49962da02982ef6 Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.163085 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.175892 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.179367 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.179392 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.180431 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-5nxrd" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.180617 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.187983 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.200800 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.200841 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m449\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-kube-api-access-8m449\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.200864 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.200916 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-cache\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.201014 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-lock\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.241829 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-vlllk" event={"ID":"ec1d296a-9fca-4e6d-8463-fc948d519080","Type":"ContainerStarted","Data":"f0d7e2a21b256ef2a962d2d0a1d6d9a2275b897fa892f318b49962da02982ef6"} Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.242073 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="dnsmasq-dns" containerID="cri-o://83282c89f9382e0c745df5314aecc0e6484d89e9aae67f5ef5aece60cf77fd61" gracePeriod=10 Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.302264 4768 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.302301 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m449\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-kube-api-access-8m449\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.302321 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.302365 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-cache\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.302459 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-lock\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.303342 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.303366 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.303405 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:39:39.80339232 +0000 UTC m=+1276.722728743 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.303786 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-lock\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.303996 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/147901f2-6caa-4983-8e45-7e938cd9f36b-cache\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.305773 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.305799 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ced95c9c2827e381653aa8bc70855cb804316b3a9ba748006cb683743c669d82/globalmount\"" pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.321774 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m449\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-kube-api-access-8m449\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.357233 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e7afa1a7-7b1d-41f5-bddf-eb75440923ab\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.671929 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-6mrgn"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.673220 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.678526 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.678761 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.678861 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.680730 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-6mrgn"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.705042 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-6mrgn"] Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.706092 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-55wxg ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-55wxg ring-data-devices scripts swiftconf]: context canceled" pod="openstack/swift-ring-rebalance-6mrgn" podUID="30a5a7f3-b504-40b9-97a3-5f1634bed034" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716316 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55wxg\" (UniqueName: \"kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716395 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716416 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716458 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716513 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716572 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.716588 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.734875 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-rxvbr"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.736116 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.767800 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rxvbr"] Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818796 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tcbl\" (UniqueName: \"kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818843 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818868 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818887 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818910 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818945 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: 
\"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.818970 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819013 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819184 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819240 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819296 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819348 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819352 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819365 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.819393 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 
16:39:39.819444 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55wxg\" (UniqueName: \"kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.819793 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.819811 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: E1203 16:39:39.819853 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:39:40.8198387 +0000 UTC m=+1277.739175123 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.820248 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.820285 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.823853 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.823921 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.825193 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle\") pod \"swift-ring-rebalance-6mrgn\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.833949 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55wxg\" (UniqueName: \"kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg\") pod \"swift-ring-rebalance-6mrgn\" (UID: 
\"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.920840 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.920969 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tcbl\" (UniqueName: \"kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.920993 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.921016 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.921040 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.921066 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.921107 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.922093 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.922737 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc 
kubenswrapper[4768]: I1203 16:39:39.923130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.927718 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.928131 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.934299 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:39 crc kubenswrapper[4768]: I1203 16:39:39.959119 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tcbl\" (UniqueName: \"kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl\") pod \"swift-ring-rebalance-rxvbr\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.055480 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.255495 4768 generic.go:334] "Generic (PLEG): container finished" podID="9323f350-627d-4270-a0f4-a74e35261cc3" containerID="83282c89f9382e0c745df5314aecc0e6484d89e9aae67f5ef5aece60cf77fd61" exitCode=0 Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.255561 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" event={"ID":"9323f350-627d-4270-a0f4-a74e35261cc3","Type":"ContainerDied","Data":"83282c89f9382e0c745df5314aecc0e6484d89e9aae67f5ef5aece60cf77fd61"} Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.257823 4768 generic.go:334] "Generic (PLEG): container finished" podID="3f6d3b77-fbdf-4dfd-b473-3e8288367442" containerID="76e6a96fd53f3a504e129b9b2e1abe8b723da28ba2e99e00023ec0ae0fa34f80" exitCode=0 Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.257848 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3f6d3b77-fbdf-4dfd-b473-3e8288367442","Type":"ContainerDied","Data":"76e6a96fd53f3a504e129b9b2e1abe8b723da28ba2e99e00023ec0ae0fa34f80"} Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.259427 4768 generic.go:334] "Generic (PLEG): container finished" podID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerID="3d4feec0454a3b7dda5e04a16e4b8923bab55feae258d992d084d7237bc264b1" exitCode=0 Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.259517 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-vlllk" event={"ID":"ec1d296a-9fca-4e6d-8463-fc948d519080","Type":"ContainerDied","Data":"3d4feec0454a3b7dda5e04a16e4b8923bab55feae258d992d084d7237bc264b1"} Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.259539 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.269908 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329284 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329344 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55wxg\" (UniqueName: \"kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329373 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329401 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329450 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329495 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329512 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle\") pod \"30a5a7f3-b504-40b9-97a3-5f1634bed034\" (UID: \"30a5a7f3-b504-40b9-97a3-5f1634bed034\") " Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.329756 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.330004 4768 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/30a5a7f3-b504-40b9-97a3-5f1634bed034-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.330889 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.330962 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts" (OuterVolumeSpecName: "scripts") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.337791 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg" (OuterVolumeSpecName: "kube-api-access-55wxg") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "kube-api-access-55wxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.338841 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.339111 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.353850 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "30a5a7f3-b504-40b9-97a3-5f1634bed034" (UID: "30a5a7f3-b504-40b9-97a3-5f1634bed034"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430823 4768 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430850 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430858 4768 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/30a5a7f3-b504-40b9-97a3-5f1634bed034-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430867 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430877 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55wxg\" (UniqueName: \"kubernetes.io/projected/30a5a7f3-b504-40b9-97a3-5f1634bed034-kube-api-access-55wxg\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.430887 4768 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/30a5a7f3-b504-40b9-97a3-5f1634bed034-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:40 crc kubenswrapper[4768]: I1203 16:39:40.838478 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:40 crc kubenswrapper[4768]: E1203 16:39:40.838666 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:40 crc kubenswrapper[4768]: E1203 16:39:40.838697 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:40 crc kubenswrapper[4768]: E1203 16:39:40.838762 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:39:42.838740824 +0000 UTC m=+1279.758077257 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.270379 4768 generic.go:334] "Generic (PLEG): container finished" podID="c3dc1084-f02c-45ff-87de-22a8818905b4" containerID="7c9603f9d24efc6b3404655b33beca690cdea546c24da984ce2f60f730fa65a3" exitCode=0 Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.270498 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3dc1084-f02c-45ff-87de-22a8818905b4","Type":"ContainerDied","Data":"7c9603f9d24efc6b3404655b33beca690cdea546c24da984ce2f60f730fa65a3"} Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.270877 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-6mrgn" Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.585305 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/swift-ring-rebalance-6mrgn"] Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.585362 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/swift-ring-rebalance-6mrgn"] Dec 03 16:39:41 crc kubenswrapper[4768]: I1203 16:39:41.890965 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-rxvbr"] Dec 03 16:39:42 crc kubenswrapper[4768]: W1203 16:39:42.825422 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43a9322c_e5fe_40d8_849f_dc84a5763f9c.slice/crio-773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba WatchSource:0}: Error finding container 773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba: Status 404 returned error can't find the container with id 773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba Dec 03 16:39:42 crc kubenswrapper[4768]: I1203 16:39:42.903154 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:42 crc kubenswrapper[4768]: E1203 16:39:42.903895 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:42 crc kubenswrapper[4768]: E1203 16:39:42.903947 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:42 crc kubenswrapper[4768]: E1203 16:39:42.904037 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:39:46.904009729 +0000 UTC m=+1283.823346192 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:42 crc kubenswrapper[4768]: I1203 16:39:42.922999 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.005330 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r72h4\" (UniqueName: \"kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4\") pod \"9323f350-627d-4270-a0f4-a74e35261cc3\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.005457 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config\") pod \"9323f350-627d-4270-a0f4-a74e35261cc3\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.005485 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc\") pod \"9323f350-627d-4270-a0f4-a74e35261cc3\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.005543 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb\") pod \"9323f350-627d-4270-a0f4-a74e35261cc3\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.007044 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb\") pod \"9323f350-627d-4270-a0f4-a74e35261cc3\" (UID: \"9323f350-627d-4270-a0f4-a74e35261cc3\") " Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.013267 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4" (OuterVolumeSpecName: "kube-api-access-r72h4") pod "9323f350-627d-4270-a0f4-a74e35261cc3" (UID: "9323f350-627d-4270-a0f4-a74e35261cc3"). InnerVolumeSpecName "kube-api-access-r72h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.066916 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9323f350-627d-4270-a0f4-a74e35261cc3" (UID: "9323f350-627d-4270-a0f4-a74e35261cc3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.081972 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config" (OuterVolumeSpecName: "config") pod "9323f350-627d-4270-a0f4-a74e35261cc3" (UID: "9323f350-627d-4270-a0f4-a74e35261cc3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.090053 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9323f350-627d-4270-a0f4-a74e35261cc3" (UID: "9323f350-627d-4270-a0f4-a74e35261cc3"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.098730 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9323f350-627d-4270-a0f4-a74e35261cc3" (UID: "9323f350-627d-4270-a0f4-a74e35261cc3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.109970 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.110001 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.110011 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.110021 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9323f350-627d-4270-a0f4-a74e35261cc3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.110034 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r72h4\" (UniqueName: \"kubernetes.io/projected/9323f350-627d-4270-a0f4-a74e35261cc3-kube-api-access-r72h4\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.289706 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-vlllk" event={"ID":"ec1d296a-9fca-4e6d-8463-fc948d519080","Type":"ContainerStarted","Data":"c3c9d658027b470b635e86d7e62452e77fcb774e7a473166f03acadf73b8f4e6"} Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.289863 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.290841 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxvbr" event={"ID":"43a9322c-e5fe-40d8-849f-dc84a5763f9c","Type":"ContainerStarted","Data":"773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba"} Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.295914 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"3f6d3b77-fbdf-4dfd-b473-3e8288367442","Type":"ContainerStarted","Data":"eec1b1ee33aadac4bcc98ec07aae31941eb3a3507671f201a4d80608fe570d60"} Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.298607 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"c3dc1084-f02c-45ff-87de-22a8818905b4","Type":"ContainerStarted","Data":"e2678c6d1057eeb248bdbedb505b316424d7c8dc5afb5ba75daec70fe44e19b0"} Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.303775 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" event={"ID":"9323f350-627d-4270-a0f4-a74e35261cc3","Type":"ContainerDied","Data":"08c3a0b23365843df5e831c0ae3fcedfef113b49a112788700026593948f075f"} 
Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.303814 4768 scope.go:117] "RemoveContainer" containerID="83282c89f9382e0c745df5314aecc0e6484d89e9aae67f5ef5aece60cf77fd61" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.303985 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-sdrjd" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.308085 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-vlllk" podStartSLOduration=5.3080685 podStartE2EDuration="5.3080685s" podCreationTimestamp="2025-12-03 16:39:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:43.305498581 +0000 UTC m=+1280.224835004" watchObservedRunningTime="2025-12-03 16:39:43.3080685 +0000 UTC m=+1280.227404923" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.329268 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371976.525526 podStartE2EDuration="1m0.329249879s" podCreationTimestamp="2025-12-03 16:38:43 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.585096512 +0000 UTC m=+1235.504432935" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:43.326712071 +0000 UTC m=+1280.246048494" watchObservedRunningTime="2025-12-03 16:39:43.329249879 +0000 UTC m=+1280.248586302" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.346288 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=24.95208611 podStartE2EDuration="59.346271106s" podCreationTimestamp="2025-12-03 16:38:44 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.682465797 +0000 UTC m=+1235.601802220" lastFinishedPulling="2025-12-03 16:39:33.076650793 +0000 UTC m=+1269.995987216" observedRunningTime="2025-12-03 16:39:43.344880949 +0000 UTC m=+1280.264217372" watchObservedRunningTime="2025-12-03 16:39:43.346271106 +0000 UTC m=+1280.265607529" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.364957 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.375036 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-sdrjd"] Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.549514 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30a5a7f3-b504-40b9-97a3-5f1634bed034" path="/var/lib/kubelet/pods/30a5a7f3-b504-40b9-97a3-5f1634bed034/volumes" Dec 03 16:39:43 crc kubenswrapper[4768]: I1203 16:39:43.549974 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" path="/var/lib/kubelet/pods/9323f350-627d-4270-a0f4-a74e35261cc3/volumes" Dec 03 16:39:45 crc kubenswrapper[4768]: I1203 16:39:45.062131 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 03 16:39:45 crc kubenswrapper[4768]: I1203 16:39:45.062379 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 03 16:39:46 crc kubenswrapper[4768]: I1203 16:39:46.227039 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 03 16:39:46 crc kubenswrapper[4768]: I1203 
16:39:46.227459 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 03 16:39:46 crc kubenswrapper[4768]: I1203 16:39:46.658720 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-548665d79b-qvtkc" Dec 03 16:39:46 crc kubenswrapper[4768]: I1203 16:39:46.982473 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:46 crc kubenswrapper[4768]: E1203 16:39:46.982676 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:46 crc kubenswrapper[4768]: E1203 16:39:46.982698 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:46 crc kubenswrapper[4768]: E1203 16:39:46.982749 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:39:54.982731717 +0000 UTC m=+1291.902068140 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:47 crc kubenswrapper[4768]: I1203 16:39:47.405792 4768 scope.go:117] "RemoveContainer" containerID="8d99cad7b59c8235d2b4fd37cbd082cf637e8f8c0ad462acf4a658800fafece0" Dec 03 16:39:47 crc kubenswrapper[4768]: I1203 16:39:47.703361 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.078637 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.359818 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"a0d45532-8a91-4fa5-a7b5-21fdcf44160e","Type":"ContainerStarted","Data":"756f65b2a67ccfb7fe006ed73f6eadced53d3f08598f5fa8abc87ad217a176f7"} Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.364254 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerStarted","Data":"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"} Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.366673 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f9cb5864-2bfb-49c0-8124-b11beb8fdad1","Type":"ContainerStarted","Data":"ec7fe5c71484442a5ba7bbaf02dbe77c372a23589cc40c717bf158ba0ad92a44"} Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.388449 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=7.551449104 podStartE2EDuration="56.388430159s" podCreationTimestamp="2025-12-03 16:38:52 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.677400031 +0000 UTC m=+1235.596736454" 
lastFinishedPulling="2025-12-03 16:39:47.514381096 +0000 UTC m=+1284.433717509" observedRunningTime="2025-12-03 16:39:48.382394727 +0000 UTC m=+1285.301731150" watchObservedRunningTime="2025-12-03 16:39:48.388430159 +0000 UTC m=+1285.307766572" Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.480796 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-vlllk" Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.537766 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.538175 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="dnsmasq-dns" containerID="cri-o://ea6663d65d89340cda6cfd9a42f4d1ca76ca166cc987cde54c76105a8d4e3ab0" gracePeriod=10 Dec 03 16:39:48 crc kubenswrapper[4768]: I1203 16:39:48.592908 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 03 16:39:49 crc kubenswrapper[4768]: I1203 16:39:49.389162 4768 generic.go:334] "Generic (PLEG): container finished" podID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerID="ea6663d65d89340cda6cfd9a42f4d1ca76ca166cc987cde54c76105a8d4e3ab0" exitCode=0 Dec 03 16:39:49 crc kubenswrapper[4768]: I1203 16:39:49.390679 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" event={"ID":"0e784157-56f0-4c49-a361-4ffc9edecf2b","Type":"ContainerDied","Data":"ea6663d65d89340cda6cfd9a42f4d1ca76ca166cc987cde54c76105a8d4e3ab0"} Dec 03 16:39:50 crc kubenswrapper[4768]: I1203 16:39:50.123111 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 03 16:39:50 crc kubenswrapper[4768]: I1203 16:39:50.220479 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 03 16:39:50 crc kubenswrapper[4768]: I1203 16:39:50.398933 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerStarted","Data":"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"} Dec 03 16:39:50 crc kubenswrapper[4768]: E1203 16:39:50.418755 4768 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.38:53500->38.102.83.38:40273: write tcp 38.102.83.38:53500->38.102.83.38:40273: write: broken pipe Dec 03 16:39:50 crc kubenswrapper[4768]: I1203 16:39:50.593312 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 03 16:39:50 crc kubenswrapper[4768]: I1203 16:39:50.643359 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.498920 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f9cb5864-2bfb-49c0-8124-b11beb8fdad1","Type":"ContainerStarted","Data":"fd0e4ef449eb56afb925ca6c86210eabeec5f03d3f8988b5736325843c341f8f"} Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.499782 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.512457 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.537393 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=14.680683652999999 podStartE2EDuration="1m3.537359747s" podCreationTimestamp="2025-12-03 16:38:48 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.549148116 +0000 UTC m=+1235.468484549" lastFinishedPulling="2025-12-03 16:39:47.40582422 +0000 UTC m=+1284.325160643" observedRunningTime="2025-12-03 16:39:51.528041057 +0000 UTC m=+1288.447377480" watchObservedRunningTime="2025-12-03 16:39:51.537359747 +0000 UTC m=+1288.456696170" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.625857 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.681885 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5vkj\" (UniqueName: \"kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj\") pod \"0e784157-56f0-4c49-a361-4ffc9edecf2b\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.681950 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc\") pod \"0e784157-56f0-4c49-a361-4ffc9edecf2b\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.682055 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config\") pod \"0e784157-56f0-4c49-a361-4ffc9edecf2b\" (UID: \"0e784157-56f0-4c49-a361-4ffc9edecf2b\") " Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.690669 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj" (OuterVolumeSpecName: "kube-api-access-h5vkj") pod "0e784157-56f0-4c49-a361-4ffc9edecf2b" (UID: "0e784157-56f0-4c49-a361-4ffc9edecf2b"). InnerVolumeSpecName "kube-api-access-h5vkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.730386 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config" (OuterVolumeSpecName: "config") pod "0e784157-56f0-4c49-a361-4ffc9edecf2b" (UID: "0e784157-56f0-4c49-a361-4ffc9edecf2b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.742996 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0e784157-56f0-4c49-a361-4ffc9edecf2b" (UID: "0e784157-56f0-4c49-a361-4ffc9edecf2b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.784614 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5vkj\" (UniqueName: \"kubernetes.io/projected/0e784157-56f0-4c49-a361-4ffc9edecf2b-kube-api-access-h5vkj\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.784647 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:51 crc kubenswrapper[4768]: I1203 16:39:51.784655 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e784157-56f0-4c49-a361-4ffc9edecf2b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.340035 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.428171 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.538095 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.543224 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-7rr8d" event={"ID":"0e784157-56f0-4c49-a361-4ffc9edecf2b","Type":"ContainerDied","Data":"309f2e9c3414b100cd3ba464bf90abf2509d4b14fbac182f7f5490d61a108c17"} Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.543310 4768 scope.go:117] "RemoveContainer" containerID="ea6663d65d89340cda6cfd9a42f4d1ca76ca166cc987cde54c76105a8d4e3ab0" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.547227 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxvbr" event={"ID":"43a9322c-e5fe-40d8-849f-dc84a5763f9c","Type":"ContainerStarted","Data":"1c8eeb33a51f49bc4811d3f3c8f8bcf62eca26e7f12ea0381e7c10aa7e206b58"} Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.577820 4768 scope.go:117] "RemoveContainer" containerID="2a7a84b7c943f8e380ea92a23f7ec63e51705f408613d4be8503b1c97a9ea703" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.588988 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-rxvbr" podStartSLOduration=5.21771371 podStartE2EDuration="13.588951469s" podCreationTimestamp="2025-12-03 16:39:39 +0000 UTC" firstStartedPulling="2025-12-03 16:39:42.827458693 +0000 UTC m=+1279.746795146" lastFinishedPulling="2025-12-03 16:39:51.198696482 +0000 UTC m=+1288.118032905" observedRunningTime="2025-12-03 16:39:52.573467643 +0000 UTC m=+1289.492804076" watchObservedRunningTime="2025-12-03 16:39:52.588951469 +0000 UTC m=+1289.508287892" Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.599654 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:39:52 crc kubenswrapper[4768]: I1203 16:39:52.607685 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-7rr8d"] Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.552215 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" 
path="/var/lib/kubelet/pods/0e784157-56f0-4c49-a361-4ffc9edecf2b/volumes" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.637357 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.796750 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:39:53 crc kubenswrapper[4768]: E1203 16:39:53.797458 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.797482 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: E1203 16:39:53.797546 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="init" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.797558 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="init" Dec 03 16:39:53 crc kubenswrapper[4768]: E1203 16:39:53.797582 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.797620 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: E1203 16:39:53.797674 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="init" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.797716 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="init" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.798083 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="9323f350-627d-4270-a0f4-a74e35261cc3" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.798104 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e784157-56f0-4c49-a361-4ffc9edecf2b" containerName="dnsmasq-dns" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.799662 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.803323 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.804148 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-k4rjg" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.811951 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.811951 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.826475 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829802 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829855 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829883 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829902 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829925 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-scripts\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.829999 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-config\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.830071 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92vbh\" (UniqueName: \"kubernetes.io/projected/0157f48e-0d1b-492c-8dc5-c859820905d8-kube-api-access-92vbh\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: 
I1203 16:39:53.931677 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-config\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931767 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92vbh\" (UniqueName: \"kubernetes.io/projected/0157f48e-0d1b-492c-8dc5-c859820905d8-kube-api-access-92vbh\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931817 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931847 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931876 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931898 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.931935 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-scripts\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.932799 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-scripts\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.932928 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.933082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0157f48e-0d1b-492c-8dc5-c859820905d8-config\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.939145 4768 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.940285 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.946384 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0157f48e-0d1b-492c-8dc5-c859820905d8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:53 crc kubenswrapper[4768]: I1203 16:39:53.959734 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92vbh\" (UniqueName: \"kubernetes.io/projected/0157f48e-0d1b-492c-8dc5-c859820905d8-kube-api-access-92vbh\") pod \"ovn-northd-0\" (UID: \"0157f48e-0d1b-492c-8dc5-c859820905d8\") " pod="openstack/ovn-northd-0" Dec 03 16:39:54 crc kubenswrapper[4768]: I1203 16:39:54.127730 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 03 16:39:55 crc kubenswrapper[4768]: I1203 16:39:55.063434 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:39:55 crc kubenswrapper[4768]: E1203 16:39:55.063627 4768 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 03 16:39:55 crc kubenswrapper[4768]: E1203 16:39:55.064153 4768 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 03 16:39:55 crc kubenswrapper[4768]: E1203 16:39:55.064792 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift podName:147901f2-6caa-4983-8e45-7e938cd9f36b nodeName:}" failed. No retries permitted until 2025-12-03 16:40:11.064746519 +0000 UTC m=+1307.984082942 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift") pod "swift-storage-0" (UID: "147901f2-6caa-4983-8e45-7e938cd9f36b") : configmap "swift-ring-files" not found Dec 03 16:39:55 crc kubenswrapper[4768]: I1203 16:39:55.375995 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 03 16:39:55 crc kubenswrapper[4768]: I1203 16:39:55.574310 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0157f48e-0d1b-492c-8dc5-c859820905d8","Type":"ContainerStarted","Data":"cabade8008dbd6d2314c7713bdb4a5258a37cdb67fc58bc51e3fc40012f039a4"} Dec 03 16:39:55 crc kubenswrapper[4768]: I1203 16:39:55.577932 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerStarted","Data":"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"} Dec 03 16:39:55 crc kubenswrapper[4768]: I1203 16:39:55.600253 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=12.956941834 podStartE2EDuration="1m8.60023327s" podCreationTimestamp="2025-12-03 16:38:47 +0000 UTC" firstStartedPulling="2025-12-03 16:38:59.388968711 +0000 UTC m=+1236.308305134" lastFinishedPulling="2025-12-03 16:39:55.032260147 +0000 UTC m=+1291.951596570" observedRunningTime="2025-12-03 16:39:55.595821422 +0000 UTC m=+1292.515157865" watchObservedRunningTime="2025-12-03 16:39:55.60023327 +0000 UTC m=+1292.519569693" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.028925 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.029509 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.146985 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-jc27s"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.149197 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.167141 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-jc27s"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.176617 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-49e4-account-create-update-v4lpm"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.178259 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.180633 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.188052 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh9sd\" (UniqueName: \"kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd\") pod \"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.188125 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts\") pod \"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.191199 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-49e4-account-create-update-v4lpm"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.289967 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m52h\" (UniqueName: \"kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.290164 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.290246 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh9sd\" (UniqueName: \"kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd\") pod \"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.290886 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts\") pod \"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.291968 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts\") pod \"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.313828 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh9sd\" (UniqueName: \"kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd\") pod 
\"keystone-db-create-jc27s\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.364186 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-9zmrs"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.365622 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.373283 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-ac7a-account-create-update-p8klf"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.374541 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.379442 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.392452 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9zmrs"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393557 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m52h\" (UniqueName: \"kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393625 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393673 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393728 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mp2t\" (UniqueName: \"kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393761 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksxnm\" (UniqueName: \"kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.393842 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.398116 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.403181 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ac7a-account-create-update-p8klf"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.420261 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m52h\" (UniqueName: \"kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h\") pod \"keystone-49e4-account-create-update-v4lpm\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.455320 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-56cd74f89f-x5fb7" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.495673 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.495755 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.495806 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mp2t\" (UniqueName: \"kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.495834 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksxnm\" (UniqueName: \"kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.498550 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.499226 4768 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.502374 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jc27s" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.515087 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.515399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksxnm\" (UniqueName: \"kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm\") pod \"placement-db-create-9zmrs\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.517869 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mp2t\" (UniqueName: \"kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t\") pod \"placement-ac7a-account-create-update-p8klf\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.665554 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nlld2"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.666781 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.683850 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nlld2"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.693262 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9zmrs" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.698926 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64xjk\" (UniqueName: \"kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.699157 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.720574 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.763589 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-976f-account-create-update-gsp5b"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.764776 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.768973 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.771326 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-976f-account-create-update-gsp5b"] Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.781883 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-779849886d-xc5w8" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.802124 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f5nw\" (UniqueName: \"kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.802170 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.802247 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64xjk\" (UniqueName: \"kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.802396 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.804191 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.845714 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64xjk\" (UniqueName: \"kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk\") pod \"glance-db-create-nlld2\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " pod="openstack/glance-db-create-nlld2" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.904209 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f5nw\" (UniqueName: \"kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.904257 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.905472 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.924210 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f5nw\" (UniqueName: \"kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw\") pod \"glance-976f-account-create-update-gsp5b\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:56 crc kubenswrapper[4768]: I1203 16:39:56.986982 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nlld2" Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.083889 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.702710 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5c0ad451-c513-4f94-ac08-aaa2c7df9ae8" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.741517 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-49e4-account-create-update-v4lpm"] Dec 03 16:39:57 crc kubenswrapper[4768]: W1203 16:39:57.755357 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95e13608_2d5c_43c8_a443_715a32b7edda.slice/crio-def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804 WatchSource:0}: Error finding container def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804: Status 404 returned error can't find the container with id def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804 Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.758469 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-ac7a-account-create-update-p8klf"] Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.768350 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nlld2"] Dec 03 16:39:57 crc kubenswrapper[4768]: W1203 16:39:57.770487 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddcf80a9b_b333_4ebb_b757_0306230722df.slice/crio-419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4 WatchSource:0}: Error finding container 419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4: Status 404 returned error can't find the container with id 419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4 Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.776175 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/keystone-db-create-jc27s"] Dec 03 16:39:57 crc kubenswrapper[4768]: W1203 16:39:57.897457 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad42c9c3_1853_43c3_a434_23d0889b8dd4.slice/crio-73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c WatchSource:0}: Error finding container 73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c: Status 404 returned error can't find the container with id 73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c Dec 03 16:39:57 crc kubenswrapper[4768]: I1203 16:39:57.902051 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-9zmrs"] Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.008370 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-976f-account-create-update-gsp5b"] Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.256080 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.629930 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0157f48e-0d1b-492c-8dc5-c859820905d8","Type":"ContainerStarted","Data":"cd36501043ca6307a6a22acc01a8346e25df1b0c2bdc6b54d38dc03e3803aed7"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.631255 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-976f-account-create-update-gsp5b" event={"ID":"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4","Type":"ContainerStarted","Data":"b0fbd670114b36ce9f2697718d82351758e858795cfac8f1ca088d0a994444d0"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.631281 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-976f-account-create-update-gsp5b" event={"ID":"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4","Type":"ContainerStarted","Data":"add0bfa42381e5c2d5f420e0f128cc8abd12df0794118517f5dc4a73f6a2c59e"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.632867 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ac7a-account-create-update-p8klf" event={"ID":"95e13608-2d5c-43c8-a443-715a32b7edda","Type":"ContainerStarted","Data":"e73bda5eb724d99f05684f50b3a69bf527da823430998f0b3ba0af6925e769b4"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.632893 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ac7a-account-create-update-p8klf" event={"ID":"95e13608-2d5c-43c8-a443-715a32b7edda","Type":"ContainerStarted","Data":"def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.634049 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49e4-account-create-update-v4lpm" event={"ID":"2bc7eee1-578b-4091-8261-2f27ad2122cc","Type":"ContainerStarted","Data":"7b2c7e5607a6e9c3f01cf02cad648a4eef7d15bc628b57b0eeb0ed1a65abb194"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.634163 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49e4-account-create-update-v4lpm" event={"ID":"2bc7eee1-578b-4091-8261-2f27ad2122cc","Type":"ContainerStarted","Data":"fad7db735a00ce7a80c17675991c50bc471085ae8552dbd51faa47d033be4b21"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.641126 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9zmrs" 
event={"ID":"ad42c9c3-1853-43c3-a434-23d0889b8dd4","Type":"ContainerStarted","Data":"a939e7a084def3cbe0569596f865278e89d78443ff2816aa7bc18a9b650f10dd"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.641168 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9zmrs" event={"ID":"ad42c9c3-1853-43c3-a434-23d0889b8dd4","Type":"ContainerStarted","Data":"73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.655438 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-976f-account-create-update-gsp5b" podStartSLOduration=2.65541741 podStartE2EDuration="2.65541741s" podCreationTimestamp="2025-12-03 16:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:58.645246877 +0000 UTC m=+1295.564583300" watchObservedRunningTime="2025-12-03 16:39:58.65541741 +0000 UTC m=+1295.574753833" Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.658848 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nlld2" event={"ID":"b709be2c-2e7f-4013-889f-cff78d262a56","Type":"ContainerStarted","Data":"611a5bbb7bf89b1c4b8b8167dd4506a0883ebfc17dbd4b5252a7991d61348458"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.658900 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nlld2" event={"ID":"b709be2c-2e7f-4013-889f-cff78d262a56","Type":"ContainerStarted","Data":"4defac346a65252c269f9ea35107c6dd90033d5c58312877b43d2b76363faa4e"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.666228 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jc27s" event={"ID":"dcf80a9b-b333-4ebb-b757-0306230722df","Type":"ContainerStarted","Data":"c7ac60d5fd14fa49377774b943f6dd3e1c35f927a07a9f362fd7899257aa9902"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.669775 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jc27s" event={"ID":"dcf80a9b-b333-4ebb-b757-0306230722df","Type":"ContainerStarted","Data":"419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4"} Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.673088 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-ac7a-account-create-update-p8klf" podStartSLOduration=2.673066854 podStartE2EDuration="2.673066854s" podCreationTimestamp="2025-12-03 16:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:58.662649205 +0000 UTC m=+1295.581985638" watchObservedRunningTime="2025-12-03 16:39:58.673066854 +0000 UTC m=+1295.592403277" Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.682071 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-49e4-account-create-update-v4lpm" podStartSLOduration=2.6820510459999998 podStartE2EDuration="2.682051046s" podCreationTimestamp="2025-12-03 16:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:58.676277671 +0000 UTC m=+1295.595614094" watchObservedRunningTime="2025-12-03 16:39:58.682051046 +0000 UTC m=+1295.601387469" Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.694742 4768 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-9zmrs" podStartSLOduration=2.694715456 podStartE2EDuration="2.694715456s" podCreationTimestamp="2025-12-03 16:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:58.690855692 +0000 UTC m=+1295.610192115" watchObservedRunningTime="2025-12-03 16:39:58.694715456 +0000 UTC m=+1295.614051879" Dec 03 16:39:58 crc kubenswrapper[4768]: I1203 16:39:58.766517 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-nlld2" podStartSLOduration=2.766492543 podStartE2EDuration="2.766492543s" podCreationTimestamp="2025-12-03 16:39:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:39:58.728266437 +0000 UTC m=+1295.647602860" watchObservedRunningTime="2025-12-03 16:39:58.766492543 +0000 UTC m=+1295.685828966" Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.336798 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.682308 4768 generic.go:334] "Generic (PLEG): container finished" podID="ad42c9c3-1853-43c3-a434-23d0889b8dd4" containerID="a939e7a084def3cbe0569596f865278e89d78443ff2816aa7bc18a9b650f10dd" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.682427 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9zmrs" event={"ID":"ad42c9c3-1853-43c3-a434-23d0889b8dd4","Type":"ContainerDied","Data":"a939e7a084def3cbe0569596f865278e89d78443ff2816aa7bc18a9b650f10dd"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.685243 4768 generic.go:334] "Generic (PLEG): container finished" podID="b709be2c-2e7f-4013-889f-cff78d262a56" containerID="611a5bbb7bf89b1c4b8b8167dd4506a0883ebfc17dbd4b5252a7991d61348458" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.685325 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nlld2" event={"ID":"b709be2c-2e7f-4013-889f-cff78d262a56","Type":"ContainerDied","Data":"611a5bbb7bf89b1c4b8b8167dd4506a0883ebfc17dbd4b5252a7991d61348458"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.687940 4768 generic.go:334] "Generic (PLEG): container finished" podID="dcf80a9b-b333-4ebb-b757-0306230722df" containerID="c7ac60d5fd14fa49377774b943f6dd3e1c35f927a07a9f362fd7899257aa9902" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.688028 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jc27s" event={"ID":"dcf80a9b-b333-4ebb-b757-0306230722df","Type":"ContainerDied","Data":"c7ac60d5fd14fa49377774b943f6dd3e1c35f927a07a9f362fd7899257aa9902"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.692090 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0157f48e-0d1b-492c-8dc5-c859820905d8","Type":"ContainerStarted","Data":"fca8dea5a2d1e7840061afd3fb022c7d8b082e6d225c1306b5dc0233ce20ade5"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.692493 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.699265 4768 generic.go:334] "Generic (PLEG): container finished" 
podID="56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" containerID="b0fbd670114b36ce9f2697718d82351758e858795cfac8f1ca088d0a994444d0" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.699502 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-976f-account-create-update-gsp5b" event={"ID":"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4","Type":"ContainerDied","Data":"b0fbd670114b36ce9f2697718d82351758e858795cfac8f1ca088d0a994444d0"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.709935 4768 generic.go:334] "Generic (PLEG): container finished" podID="95e13608-2d5c-43c8-a443-715a32b7edda" containerID="e73bda5eb724d99f05684f50b3a69bf527da823430998f0b3ba0af6925e769b4" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.710083 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ac7a-account-create-update-p8klf" event={"ID":"95e13608-2d5c-43c8-a443-715a32b7edda","Type":"ContainerDied","Data":"e73bda5eb724d99f05684f50b3a69bf527da823430998f0b3ba0af6925e769b4"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.715047 4768 generic.go:334] "Generic (PLEG): container finished" podID="43a9322c-e5fe-40d8-849f-dc84a5763f9c" containerID="1c8eeb33a51f49bc4811d3f3c8f8bcf62eca26e7f12ea0381e7c10aa7e206b58" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.715386 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxvbr" event={"ID":"43a9322c-e5fe-40d8-849f-dc84a5763f9c","Type":"ContainerDied","Data":"1c8eeb33a51f49bc4811d3f3c8f8bcf62eca26e7f12ea0381e7c10aa7e206b58"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.720836 4768 generic.go:334] "Generic (PLEG): container finished" podID="2bc7eee1-578b-4091-8261-2f27ad2122cc" containerID="7b2c7e5607a6e9c3f01cf02cad648a4eef7d15bc628b57b0eeb0ed1a65abb194" exitCode=0 Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.721118 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49e4-account-create-update-v4lpm" event={"ID":"2bc7eee1-578b-4091-8261-2f27ad2122cc","Type":"ContainerDied","Data":"7b2c7e5607a6e9c3f01cf02cad648a4eef7d15bc628b57b0eeb0ed1a65abb194"} Dec 03 16:39:59 crc kubenswrapper[4768]: I1203 16:39:59.760656 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=5.031087704 podStartE2EDuration="6.760627572s" podCreationTimestamp="2025-12-03 16:39:53 +0000 UTC" firstStartedPulling="2025-12-03 16:39:55.388390671 +0000 UTC m=+1292.307727094" lastFinishedPulling="2025-12-03 16:39:57.117930529 +0000 UTC m=+1294.037266962" observedRunningTime="2025-12-03 16:39:59.737865761 +0000 UTC m=+1296.657202184" watchObservedRunningTime="2025-12-03 16:39:59.760627572 +0000 UTC m=+1296.679964035" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.078831 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-jc27s" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.195949 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh9sd\" (UniqueName: \"kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd\") pod \"dcf80a9b-b333-4ebb-b757-0306230722df\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.196055 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts\") pod \"dcf80a9b-b333-4ebb-b757-0306230722df\" (UID: \"dcf80a9b-b333-4ebb-b757-0306230722df\") " Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.196641 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dcf80a9b-b333-4ebb-b757-0306230722df" (UID: "dcf80a9b-b333-4ebb-b757-0306230722df"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.201560 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd" (OuterVolumeSpecName: "kube-api-access-nh9sd") pod "dcf80a9b-b333-4ebb-b757-0306230722df" (UID: "dcf80a9b-b333-4ebb-b757-0306230722df"). InnerVolumeSpecName "kube-api-access-nh9sd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.299085 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh9sd\" (UniqueName: \"kubernetes.io/projected/dcf80a9b-b333-4ebb-b757-0306230722df-kube-api-access-nh9sd\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.299146 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dcf80a9b-b333-4ebb-b757-0306230722df-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.732164 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-jc27s" event={"ID":"dcf80a9b-b333-4ebb-b757-0306230722df","Type":"ContainerDied","Data":"419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4"} Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.732237 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="419b46eb573cd18db2a684a0e2f3aa4e1eb0b5c8dc9f393364843746a93f8ba4" Dec 03 16:40:00 crc kubenswrapper[4768]: I1203 16:40:00.732341 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-jc27s" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.190765 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nlld2" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.324892 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64xjk\" (UniqueName: \"kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk\") pod \"b709be2c-2e7f-4013-889f-cff78d262a56\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.325095 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts\") pod \"b709be2c-2e7f-4013-889f-cff78d262a56\" (UID: \"b709be2c-2e7f-4013-889f-cff78d262a56\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.326092 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b709be2c-2e7f-4013-889f-cff78d262a56" (UID: "b709be2c-2e7f-4013-889f-cff78d262a56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.334082 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk" (OuterVolumeSpecName: "kube-api-access-64xjk") pod "b709be2c-2e7f-4013-889f-cff78d262a56" (UID: "b709be2c-2e7f-4013-889f-cff78d262a56"). InnerVolumeSpecName "kube-api-access-64xjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.428038 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64xjk\" (UniqueName: \"kubernetes.io/projected/b709be2c-2e7f-4013-889f-cff78d262a56-kube-api-access-64xjk\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.428106 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b709be2c-2e7f-4013-889f-cff78d262a56-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.436239 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.443668 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.453057 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.466725 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-9zmrs" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.475646 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.529555 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts\") pod \"95e13608-2d5c-43c8-a443-715a32b7edda\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.529618 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.529650 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.529698 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mp2t\" (UniqueName: \"kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t\") pod \"95e13608-2d5c-43c8-a443-715a32b7edda\" (UID: \"95e13608-2d5c-43c8-a443-715a32b7edda\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530196 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530196 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "95e13608-2d5c-43c8-a443-715a32b7edda" (UID: "95e13608-2d5c-43c8-a443-715a32b7edda"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530280 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530277 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530457 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tcbl\" (UniqueName: \"kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530562 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.530639 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift\") pod \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\" (UID: \"43a9322c-e5fe-40d8-849f-dc84a5763f9c\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.531241 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95e13608-2d5c-43c8-a443-715a32b7edda-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.531271 4768 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.531844 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.534807 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t" (OuterVolumeSpecName: "kube-api-access-9mp2t") pod "95e13608-2d5c-43c8-a443-715a32b7edda" (UID: "95e13608-2d5c-43c8-a443-715a32b7edda"). InnerVolumeSpecName "kube-api-access-9mp2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.537224 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl" (OuterVolumeSpecName: "kube-api-access-5tcbl") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "kube-api-access-5tcbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.539400 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.555537 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.555889 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts" (OuterVolumeSpecName: "scripts") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.562460 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "43a9322c-e5fe-40d8-849f-dc84a5763f9c" (UID: "43a9322c-e5fe-40d8-849f-dc84a5763f9c"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.631990 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m52h\" (UniqueName: \"kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h\") pod \"2bc7eee1-578b-4091-8261-2f27ad2122cc\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632049 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f5nw\" (UniqueName: \"kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw\") pod \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632135 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ksxnm\" (UniqueName: \"kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm\") pod \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632161 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts\") pod \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\" (UID: \"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632220 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts\") pod \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\" (UID: \"ad42c9c3-1853-43c3-a434-23d0889b8dd4\") " Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632254 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts\") pod \"2bc7eee1-578b-4091-8261-2f27ad2122cc\" (UID: \"2bc7eee1-578b-4091-8261-2f27ad2122cc\") " Dec 03 16:40:01 crc 
kubenswrapper[4768]: I1203 16:40:01.632857 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tcbl\" (UniqueName: \"kubernetes.io/projected/43a9322c-e5fe-40d8-849f-dc84a5763f9c-kube-api-access-5tcbl\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632882 4768 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632899 4768 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/43a9322c-e5fe-40d8-849f-dc84a5763f9c-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632911 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632923 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mp2t\" (UniqueName: \"kubernetes.io/projected/95e13608-2d5c-43c8-a443-715a32b7edda-kube-api-access-9mp2t\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632917 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad42c9c3-1853-43c3-a434-23d0889b8dd4" (UID: "ad42c9c3-1853-43c3-a434-23d0889b8dd4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632936 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/43a9322c-e5fe-40d8-849f-dc84a5763f9c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.632983 4768 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/43a9322c-e5fe-40d8-849f-dc84a5763f9c-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.633075 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" (UID: "56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.633435 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2bc7eee1-578b-4091-8261-2f27ad2122cc" (UID: "2bc7eee1-578b-4091-8261-2f27ad2122cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.634859 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw" (OuterVolumeSpecName: "kube-api-access-7f5nw") pod "56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" (UID: "56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4"). 
InnerVolumeSpecName "kube-api-access-7f5nw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.635650 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm" (OuterVolumeSpecName: "kube-api-access-ksxnm") pod "ad42c9c3-1853-43c3-a434-23d0889b8dd4" (UID: "ad42c9c3-1853-43c3-a434-23d0889b8dd4"). InnerVolumeSpecName "kube-api-access-ksxnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.638145 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h" (OuterVolumeSpecName: "kube-api-access-5m52h") pod "2bc7eee1-578b-4091-8261-2f27ad2122cc" (UID: "2bc7eee1-578b-4091-8261-2f27ad2122cc"). InnerVolumeSpecName "kube-api-access-5m52h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735461 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m52h\" (UniqueName: \"kubernetes.io/projected/2bc7eee1-578b-4091-8261-2f27ad2122cc-kube-api-access-5m52h\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735531 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f5nw\" (UniqueName: \"kubernetes.io/projected/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-kube-api-access-7f5nw\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735547 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ksxnm\" (UniqueName: \"kubernetes.io/projected/ad42c9c3-1853-43c3-a434-23d0889b8dd4-kube-api-access-ksxnm\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735559 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735571 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad42c9c3-1853-43c3-a434-23d0889b8dd4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.735584 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2bc7eee1-578b-4091-8261-2f27ad2122cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.742256 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-9zmrs" event={"ID":"ad42c9c3-1853-43c3-a434-23d0889b8dd4","Type":"ContainerDied","Data":"73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.742301 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73ee1c87c9c2a434dd73ced43f7809df1cb669d582bb8f805f29772c709a203c" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.742267 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-9zmrs" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.744471 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-976f-account-create-update-gsp5b" event={"ID":"56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4","Type":"ContainerDied","Data":"add0bfa42381e5c2d5f420e0f128cc8abd12df0794118517f5dc4a73f6a2c59e"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.744534 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="add0bfa42381e5c2d5f420e0f128cc8abd12df0794118517f5dc4a73f6a2c59e" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.744537 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-976f-account-create-update-gsp5b" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.746348 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-ac7a-account-create-update-p8klf" event={"ID":"95e13608-2d5c-43c8-a443-715a32b7edda","Type":"ContainerDied","Data":"def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.746382 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def7fd44ecf80cf282b99e70d2fa4cb4154641c0a03d3784efe3518ce28ac804" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.746415 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-ac7a-account-create-update-p8klf" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.748202 4768 generic.go:334] "Generic (PLEG): container finished" podID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerID="294cadb0d1f3f85d9f524a4aefc23a1ebc5763fc2780bdaeac0caaf1e2d2aa87" exitCode=0 Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.748249 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerDied","Data":"294cadb0d1f3f85d9f524a4aefc23a1ebc5763fc2780bdaeac0caaf1e2d2aa87"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.759853 4768 generic.go:334] "Generic (PLEG): container finished" podID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerID="a8e17f753020d4487160b9ae587a0c00bfee7bf215ce333c439e75659175f6e7" exitCode=0 Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.759932 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerDied","Data":"a8e17f753020d4487160b9ae587a0c00bfee7bf215ce333c439e75659175f6e7"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.762689 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-rxvbr" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.762727 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-rxvbr" event={"ID":"43a9322c-e5fe-40d8-849f-dc84a5763f9c","Type":"ContainerDied","Data":"773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.762776 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="773a2d4d59c9e7a53364e3f96e32e955b9de8c674e8ed11e5e9d2bc0724d04ba" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.764650 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-49e4-account-create-update-v4lpm" event={"ID":"2bc7eee1-578b-4091-8261-2f27ad2122cc","Type":"ContainerDied","Data":"fad7db735a00ce7a80c17675991c50bc471085ae8552dbd51faa47d033be4b21"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.764688 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fad7db735a00ce7a80c17675991c50bc471085ae8552dbd51faa47d033be4b21" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.764687 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-49e4-account-create-update-v4lpm" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.767337 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nlld2" event={"ID":"b709be2c-2e7f-4013-889f-cff78d262a56","Type":"ContainerDied","Data":"4defac346a65252c269f9ea35107c6dd90033d5c58312877b43d2b76363faa4e"} Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.767359 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4defac346a65252c269f9ea35107c6dd90033d5c58312877b43d2b76363faa4e" Dec 03 16:40:01 crc kubenswrapper[4768]: I1203 16:40:01.767396 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nlld2"
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.790371 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerStarted","Data":"7af7ab7bf1c533b1ddfef6547d7db892461c1c6ca7defbd233606f8cab35dff6"}
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.791339 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.793365 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerStarted","Data":"5b3e8d3049ece4d30ec1e966081fa2d3269e38758d0a529ab387872629abf7ab"}
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.794263 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.824738 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=57.257711757 podStartE2EDuration="1m21.824720482s" podCreationTimestamp="2025-12-03 16:38:41 +0000 UTC" firstStartedPulling="2025-12-03 16:38:57.872953046 +0000 UTC m=+1234.792289469" lastFinishedPulling="2025-12-03 16:39:22.439961731 +0000 UTC m=+1259.359298194" observedRunningTime="2025-12-03 16:40:02.814673102 +0000 UTC m=+1299.734009555" watchObservedRunningTime="2025-12-03 16:40:02.824720482 +0000 UTC m=+1299.744056905"
Dec 03 16:40:02 crc kubenswrapper[4768]: I1203 16:40:02.842689 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=57.582649904 podStartE2EDuration="1m21.842671294s" podCreationTimestamp="2025-12-03 16:38:41 +0000 UTC" firstStartedPulling="2025-12-03 16:38:58.17989682 +0000 UTC m=+1235.099233243" lastFinishedPulling="2025-12-03 16:39:22.43991821 +0000 UTC m=+1259.359254633" observedRunningTime="2025-12-03 16:40:02.840368732 +0000 UTC m=+1299.759705175" watchObservedRunningTime="2025-12-03 16:40:02.842671294 +0000 UTC m=+1299.762007717"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.210461 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.227961 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-hjhg9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.338864 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.340559 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.438922 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-6bhgk-config-9rtm9"]
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439380 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad42c9c3-1853-43c3-a434-23d0889b8dd4" containerName="mariadb-database-create"
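The two pod_startup_latency_tracker entries above are internally consistent: podStartSLOduration equals the end-to-end startup time minus the image-pull window (firstStartedPulling to lastFinishedPulling), with the intervals taken from the kubelet's monotonic clock (the m=+... offsets). A minimal sketch in Python that reproduces the logged numbers from values copied out of the two entries (field names abbreviated; this checks the relationship, it does not re-implement the kubelet):

    # podStartSLOduration = E2E duration - image-pull window, on the m=+... clock.
    entries = {
        "openstack/rabbitmq-cell1-server-0": dict(
            e2e=81.824720482,           # podStartE2EDuration="1m21.824720482s"
            first_pull=1234.792289469,  # firstStartedPulling ... m=+1234.792289469
            last_pull=1259.359298194,   # lastFinishedPulling ... m=+1259.359298194
            slo_logged=57.257711757),
        "openstack/rabbitmq-server-0": dict(
            e2e=81.842671294,
            first_pull=1235.099233243,
            last_pull=1259.359254633,
            slo_logged=57.582649904),
    }
    for pod, e in entries.items():
        slo = e["e2e"] - (e["last_pull"] - e["first_pull"])
        assert abs(slo - e["slo_logged"]) < 1e-6, pod
        print(f"{pod}: podStartSLOduration={slo:.9f}s")

Both assertions hold, which is why the SLO figures (about 57.3 s and 57.6 s) are so much smaller than the 1m21s wall-clock startup: roughly 24 s of each was spent pulling images.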
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439401 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad42c9c3-1853-43c3-a434-23d0889b8dd4" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439428 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95e13608-2d5c-43c8-a443-715a32b7edda" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439438 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="95e13608-2d5c-43c8-a443-715a32b7edda" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439466 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcf80a9b-b333-4ebb-b757-0306230722df" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439475 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcf80a9b-b333-4ebb-b757-0306230722df" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439488 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b709be2c-2e7f-4013-889f-cff78d262a56" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439496 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b709be2c-2e7f-4013-889f-cff78d262a56" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439515 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439524 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439538 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bc7eee1-578b-4091-8261-2f27ad2122cc" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439548 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bc7eee1-578b-4091-8261-2f27ad2122cc" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: E1203 16:40:04.439563 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43a9322c-e5fe-40d8-849f-dc84a5763f9c" containerName="swift-ring-rebalance"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439570 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="43a9322c-e5fe-40d8-849f-dc84a5763f9c" containerName="swift-ring-rebalance"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439813 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bc7eee1-578b-4091-8261-2f27ad2122cc" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439834 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b709be2c-2e7f-4013-889f-cff78d262a56" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439853 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcf80a9b-b333-4ebb-b757-0306230722df" containerName="mariadb-database-create"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439873 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" containerName="mariadb-account-create-update"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439888 4768 memory_manager.go:354] "RemoveStaleState removing state"
podUID="43a9322c-e5fe-40d8-849f-dc84a5763f9c" containerName="swift-ring-rebalance" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439896 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad42c9c3-1853-43c3-a434-23d0889b8dd4" containerName="mariadb-database-create" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.439906 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="95e13608-2d5c-43c8-a443-715a32b7edda" containerName="mariadb-account-create-update" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.440716 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.453431 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.464509 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6bhgk-config-9rtm9"] Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598119 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598282 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598323 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598408 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qhnl\" (UniqueName: \"kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598466 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.598563 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:04 crc 
kubenswrapper[4768]: I1203 16:40:04.700662 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qhnl\" (UniqueName: \"kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.700725 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.700774 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.700808 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.700885 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.700900 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.701173 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.701223 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.701257 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
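Each volume in the entries above and below follows the same sequence: the reconciler verifies attachment (VerifyControllerAttachedVolume), starts the mount (operationExecutor.MountVolume started), and the operation generator later reports MountVolume.SetUp succeeded. The host-path mounts complete in well under a millisecond, while the projected service-account token kube-api-access-7qhnl takes about 21 ms (started 16:40:04.700662, succeeded 16:40:04.721470). A minimal sketch of extracting that per-volume latency from lines like these (regexes written for this log's escaped-quote format; mount_latencies is a hypothetical helper, not a kubelet API):

    import re

    # "I1203 16:40:04.700662" -> capture the HH:MM:SS.micros timestamp.
    TS = r'I\d{4} (\d{2}:\d{2}:\d{2}\.\d+)'
    START = re.compile(TS + r'.*operationExecutor\.MountVolume started for volume \\"([^"\\]+)\\"')
    DONE = re.compile(TS + r'.*MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"')

    def seconds(hms: str) -> float:
        h, m, s = hms.split(":")
        return int(h) * 3600 + int(m) * 60 + float(s)

    def mount_latencies(lines):
        """Yield (volume, seconds) pairs by matching start/success entries."""
        started = {}
        for line in lines:
            if m := START.search(line):
                started[m.group(2)] = seconds(m.group(1))
            elif (m := DONE.search(line)) and m.group(2) in started:
                yield m.group(2), seconds(m.group(1)) - started.pop(m.group(2))

Fed the ovn-controller-6bhgk-config-9rtm9 entries here, this yields sub-millisecond figures for the host-path volumes, a millisecond or two for the configmaps, and roughly 0.021 s for the projected token.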
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.701722 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.702903 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.721470 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qhnl\" (UniqueName: \"kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl\") pod \"ovn-controller-6bhgk-config-9rtm9\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.761079 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:04 crc kubenswrapper[4768]: I1203 16:40:04.821908 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:05 crc kubenswrapper[4768]: I1203 16:40:05.325950 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-6bhgk-config-9rtm9"]
Dec 03 16:40:05 crc kubenswrapper[4768]: I1203 16:40:05.829093 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6bhgk-config-9rtm9" event={"ID":"e1a326a8-ad06-4d22-8045-c3c461e93aa3","Type":"ContainerStarted","Data":"7b3dad82f3542873f47e55507989d807436ef1e95ebefc2d594e644baa4e6aea"}
Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.839698 4768 generic.go:334] "Generic (PLEG): container finished" podID="e1a326a8-ad06-4d22-8045-c3c461e93aa3" containerID="fb558adef74ff27918fd9121bf631dc251ee93cd72bde689b18eaa3637b793d0" exitCode=0
Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.839762 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6bhgk-config-9rtm9" event={"ID":"e1a326a8-ad06-4d22-8045-c3c461e93aa3","Type":"ContainerDied","Data":"fb558adef74ff27918fd9121bf631dc251ee93cd72bde689b18eaa3637b793d0"}
Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.981367 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-zqcnl"]
Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.982817 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.984539 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tdxnd" Dec 03 16:40:06 crc kubenswrapper[4768]: I1203 16:40:06.985170 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.004501 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zqcnl"] Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.046284 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.046343 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4fnp\" (UniqueName: \"kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.046581 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.046670 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.148390 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4fnp\" (UniqueName: \"kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.148508 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.148528 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.148682 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data\") pod 
\"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.154728 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.155029 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.166967 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.167769 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4fnp\" (UniqueName: \"kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp\") pod \"glance-db-sync-zqcnl\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") " pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.314801 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zqcnl" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.483300 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.483562 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="prometheus" containerID="cri-o://6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d" gracePeriod=600 Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.483688 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="config-reloader" containerID="cri-o://154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2" gracePeriod=600 Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.483688 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="thanos-sidecar" containerID="cri-o://512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6" gracePeriod=600 Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.693677 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5c0ad451-c513-4f94-ac08-aaa2c7df9ae8" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.852275 4768 generic.go:334] "Generic (PLEG): container finished" podID="11cf6253-a0c6-4968-8bf9-3900aec31852" 
containerID="512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6" exitCode=0 Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.852305 4768 generic.go:334] "Generic (PLEG): container finished" podID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerID="6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d" exitCode=0 Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.852449 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerDied","Data":"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"} Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.852473 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerDied","Data":"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"} Dec 03 16:40:07 crc kubenswrapper[4768]: I1203 16:40:07.983341 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-zqcnl"] Dec 03 16:40:08 crc kubenswrapper[4768]: W1203 16:40:08.011609 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod495fd475_f8cd_4fd9_86d4_cdf7765e7ad6.slice/crio-6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb WatchSource:0}: Error finding container 6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb: Status 404 returned error can't find the container with id 6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.140639 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-6bhgk-config-9rtm9" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.267098 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.267156 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.267213 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.267270 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qhnl\" (UniqueName: \"kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.267502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.268538 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run" (OuterVolumeSpecName: "var-run") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.268644 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.268639 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.268726 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.269156 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts" (OuterVolumeSpecName: "scripts") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.269740 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn\") pod \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\" (UID: \"e1a326a8-ad06-4d22-8045-c3c461e93aa3\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.270532 4768 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.270545 4768 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.270554 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.270564 4768 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e1a326a8-ad06-4d22-8045-c3c461e93aa3-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.270574 4768 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e1a326a8-ad06-4d22-8045-c3c461e93aa3-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.288086 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl" (OuterVolumeSpecName: "kube-api-access-7qhnl") pod "e1a326a8-ad06-4d22-8045-c3c461e93aa3" (UID: "e1a326a8-ad06-4d22-8045-c3c461e93aa3"). InnerVolumeSpecName "kube-api-access-7qhnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.373155 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qhnl\" (UniqueName: \"kubernetes.io/projected/e1a326a8-ad06-4d22-8045-c3c461e93aa3-kube-api-access-7qhnl\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.537088 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.575959 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576353 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576414 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576447 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576481 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvkvl\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576507 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576661 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.576701 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config\") pod \"11cf6253-a0c6-4968-8bf9-3900aec31852\" (UID: \"11cf6253-a0c6-4968-8bf9-3900aec31852\") " Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.582857 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config" (OuterVolumeSpecName: "config") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.583155 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.583448 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.586812 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out" (OuterVolumeSpecName: "config-out") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.586855 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl" (OuterVolumeSpecName: "kube-api-access-qvkvl") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "kube-api-access-qvkvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.586861 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.654296 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config" (OuterVolumeSpecName: "web-config") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.674069 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "11cf6253-a0c6-4968-8bf9-3900aec31852" (UID: "11cf6253-a0c6-4968-8bf9-3900aec31852"). InnerVolumeSpecName "pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d". 
PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679341 4768 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-tls-assets\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679376 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvkvl\" (UniqueName: \"kubernetes.io/projected/11cf6253-a0c6-4968-8bf9-3900aec31852-kube-api-access-qvkvl\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679387 4768 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/11cf6253-a0c6-4968-8bf9-3900aec31852-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679417 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") on node \"crc\" "
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679437 4768 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-web-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679448 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679458 4768 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/11cf6253-a0c6-4968-8bf9-3900aec31852-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.679466 4768 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/11cf6253-a0c6-4968-8bf9-3900aec31852-config-out\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.710563 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.710764 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d") on node "crc"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.781279 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.865858 4768 generic.go:334] "Generic (PLEG): container finished" podID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerID="154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2" exitCode=0
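The teardown above runs in reverse of mounting: per-volume UnmountVolume.TearDown, then a device-level UnmountDevice for the CSI-backed PVC. The csi_attacher.go:630 line records the shortcut taken here: kubevirt.io.hostpath-provisioner does not advertise the STAGE_UNSTAGE_VOLUME node capability, so there is no staged device mount to undo, the NodeUnstageVolume RPC is skipped, and pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d is reported detached immediately. A toy sketch of that decision (the capability name is from the CSI spec; the function is illustrative, not the kubelet's actual code):

    # If the CSI node plugin never staged the volume, UnmountDevice is a no-op.
    def unmount_device(node_capabilities: set[str], node_unstage_volume) -> str:
        if "STAGE_UNSTAGE_VOLUME" not in node_capabilities:
            # Matches "attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability
            # not set. Skipping UnmountDevice..." in the entry above.
            return "skipped"
        node_unstage_volume()  # would issue the driver's NodeUnstageVolume RPC
        return "unstaged"

    # hostpath-provisioner advertises no staging capability:
    print(unmount_device(set(), node_unstage_volume=lambda: None))  # -> skipped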
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.865981 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.865976 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerDied","Data":"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"}
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.866134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"11cf6253-a0c6-4968-8bf9-3900aec31852","Type":"ContainerDied","Data":"00994b8e4b32c45bd7ae44aac5a5d4df0e2bf34968ecb0ee9ea2624b0394ec6c"}
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.866168 4768 scope.go:117] "RemoveContainer" containerID="512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.869836 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-6bhgk-config-9rtm9" event={"ID":"e1a326a8-ad06-4d22-8045-c3c461e93aa3","Type":"ContainerDied","Data":"7b3dad82f3542873f47e55507989d807436ef1e95ebefc2d594e644baa4e6aea"}
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.869893 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b3dad82f3542873f47e55507989d807436ef1e95ebefc2d594e644baa4e6aea"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.869937 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-6bhgk-config-9rtm9"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.879758 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zqcnl" event={"ID":"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6","Type":"ContainerStarted","Data":"6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb"}
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.892975 4768 scope.go:117] "RemoveContainer" containerID="154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.907584 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.914574 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.935734 4768 scope.go:117] "RemoveContainer" containerID="6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955109 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:40:08 crc kubenswrapper[4768]: E1203 16:40:08.955678 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a326a8-ad06-4d22-8045-c3c461e93aa3" containerName="ovn-config"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955702 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a326a8-ad06-4d22-8045-c3c461e93aa3" containerName="ovn-config"
Dec 03 16:40:08 crc kubenswrapper[4768]: E1203 16:40:08.955746 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="thanos-sidecar"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955756 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="thanos-sidecar"
Dec 03 16:40:08 crc 
kubenswrapper[4768]: E1203 16:40:08.955775 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="init-config-reloader" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955783 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="init-config-reloader" Dec 03 16:40:08 crc kubenswrapper[4768]: E1203 16:40:08.955798 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="prometheus" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955805 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="prometheus" Dec 03 16:40:08 crc kubenswrapper[4768]: E1203 16:40:08.955817 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="config-reloader" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.955825 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="config-reloader" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.956045 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="prometheus" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.956060 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="thanos-sidecar" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.956078 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1a326a8-ad06-4d22-8045-c3c461e93aa3" containerName="ovn-config" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.956097 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" containerName="config-reloader" Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.963134 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.966321 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.966584 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.966826 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.966948 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.967238 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.969656 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-mvn59"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.977868 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.979329 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 03 16:40:08 crc kubenswrapper[4768]: I1203 16:40:08.988965 4768 scope.go:117] "RemoveContainer" containerID="cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.023306 4768 scope.go:117] "RemoveContainer" containerID="512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"
Dec 03 16:40:09 crc kubenswrapper[4768]: E1203 16:40:09.025139 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6\": container with ID starting with 512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6 not found: ID does not exist" containerID="512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.025205 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6"} err="failed to get container status \"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6\": rpc error: code = NotFound desc = could not find container \"512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6\": container with ID starting with 512bcdea6f5c0a0efa50f6f6b1e0c68c0976c402f8f061d273f4d26a8c294ee6 not found: ID does not exist"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.025270 4768 scope.go:117] "RemoveContainer" containerID="154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"
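The RemoveContainer/ContainerStatus pairs above and below show that container cleanup is idempotent: the old prometheus containers were already removed along with their pod sandbox, so the CRI runtime answers each status lookup with rpc error: code = NotFound, and the kubelet logs "DeleteContainer returned error" but simply moves on. A minimal sketch of the pattern (NotFoundError and the runtime object stand in for the CRI gRPC client; only the control flow mirrors the log):

    class NotFoundError(Exception):
        """Stand-in for a gRPC NotFound status from the CRI runtime."""

    def remove_container(runtime, container_id: str) -> None:
        try:
            runtime.remove_container(container_id)
        except NotFoundError:
            # "container with ID starting with ... not found: ID does not
            # exist" -- the container is already gone, so treat the removal
            # as having succeeded rather than retrying or failing the sync.
            pass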
Dec 03 16:40:09 crc kubenswrapper[4768]: E1203 16:40:09.026447 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2\": container with ID starting with 154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2 not found: ID does not exist" containerID="154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.026502 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2"} err="failed to get container status \"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2\": rpc error: code = NotFound desc = could not find container \"154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2\": container with ID starting with 154e2bdd22bbe02ad6cb2fede2c154f1fa2d7d408338f98c118223b1489481a2 not found: ID does not exist"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.026558 4768 scope.go:117] "RemoveContainer" containerID="6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"
Dec 03 16:40:09 crc kubenswrapper[4768]: E1203 16:40:09.027945 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d\": container with ID starting with 6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d not found: ID does not exist" containerID="6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.027986 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d"} err="failed to get container status \"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d\": rpc error: code = NotFound desc = could not find container \"6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d\": container with ID starting with 6f0713208b40252cb6227b5649dd38285a99d4555633fe1c12b77d835a9d498d not found: ID does not exist"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.028011 4768 scope.go:117] "RemoveContainer" containerID="cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"
Dec 03 16:40:09 crc kubenswrapper[4768]: E1203 16:40:09.028848 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc\": container with ID starting with cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc not found: ID does not exist" containerID="cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.028921 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc"} err="failed to get container status \"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc\": rpc error: code = NotFound desc = could not find container \"cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc\": container with ID starting with cac1fd3ef6dde01c6b7d3ec3d7a29676600706a4f4f32004214aee11c4adc0cc not found: ID does not exist"
Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087467 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97gpx\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-kube-api-access-97gpx\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " 
pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087673 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087700 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087733 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087772 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087794 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087813 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98a2cac0-c468-421c-8acd-b7f7e3b471ea-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087855 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087877 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 
03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087902 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.087916 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189220 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189284 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189311 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189345 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97gpx\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-kube-api-access-97gpx\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189484 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189519 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189557 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189635 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189662 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189684 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98a2cac0-c468-421c-8acd-b7f7e3b471ea-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.189721 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.200666 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.202107 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.206114 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.206699 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/98a2cac0-c468-421c-8acd-b7f7e3b471ea-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.212175 4768 
csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.212242 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/bc62c60cce49c9fe1f0c4e1ad5989a7b6836177e41a5fab7867a183c10310e2f/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.216024 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.217767 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.217849 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.218401 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-config\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.243548 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/98a2cac0-c468-421c-8acd-b7f7e3b471ea-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.250694 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97gpx\" (UniqueName: \"kubernetes.io/projected/98a2cac0-c468-421c-8acd-b7f7e3b471ea-kube-api-access-97gpx\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.261683 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-6bhgk" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.262613 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.332643 4768 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-6bhgk-config-9rtm9"] Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.345520 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-6bhgk-config-9rtm9"] Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.485741 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-91d7dcdd-3c3f-4429-8ee3-a323a403c83d\") pod \"prometheus-metric-storage-0\" (UID: \"98a2cac0-c468-421c-8acd-b7f7e3b471ea\") " pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.551913 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11cf6253-a0c6-4968-8bf9-3900aec31852" path="/var/lib/kubelet/pods/11cf6253-a0c6-4968-8bf9-3900aec31852/volumes" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.552877 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1a326a8-ad06-4d22-8045-c3c461e93aa3" path="/var/lib/kubelet/pods/e1a326a8-ad06-4d22-8045-c3c461e93aa3/volumes" Dec 03 16:40:09 crc kubenswrapper[4768]: I1203 16:40:09.583866 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:10 crc kubenswrapper[4768]: I1203 16:40:10.052769 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 03 16:40:10 crc kubenswrapper[4768]: I1203 16:40:10.901112 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerStarted","Data":"0f7c36c95257dc735fd6263f1bc86c9efdce1a1a0e2630b4c7f730402b5a5e08"} Dec 03 16:40:11 crc kubenswrapper[4768]: I1203 16:40:11.153161 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:40:11 crc kubenswrapper[4768]: I1203 16:40:11.159877 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/147901f2-6caa-4983-8e45-7e938cd9f36b-etc-swift\") pod \"swift-storage-0\" (UID: \"147901f2-6caa-4983-8e45-7e938cd9f36b\") " pod="openstack/swift-storage-0" Dec 03 16:40:11 crc kubenswrapper[4768]: I1203 16:40:11.338198 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 03 16:40:11 crc kubenswrapper[4768]: I1203 16:40:11.907234 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 03 16:40:11 crc kubenswrapper[4768]: W1203 16:40:11.920865 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod147901f2_6caa_4983_8e45_7e938cd9f36b.slice/crio-0e67f030cfdd608a5fa0224fe46aa83dbe6c5fad6957c7f1c23b6cbf4c79b7db WatchSource:0}: Error finding container 0e67f030cfdd608a5fa0224fe46aa83dbe6c5fad6957c7f1c23b6cbf4c79b7db: Status 404 returned error can't find the container with id 0e67f030cfdd608a5fa0224fe46aa83dbe6c5fad6957c7f1c23b6cbf4c79b7db Dec 03 16:40:12 crc kubenswrapper[4768]: I1203 16:40:12.957164 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"0e67f030cfdd608a5fa0224fe46aa83dbe6c5fad6957c7f1c23b6cbf4c79b7db"} Dec 03 16:40:12 crc kubenswrapper[4768]: I1203 16:40:12.991428 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.274290 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.295842 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-gqbzb"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.296948 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.327776 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gqbzb"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.397838 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.397916 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxhmh\" (UniqueName: \"kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.435567 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-f95e-account-create-update-f4h7x"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.437060 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.443116 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.447078 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f95e-account-create-update-f4h7x"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.500128 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.500311 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.500345 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lj8k\" (UniqueName: \"kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.500380 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxhmh\" (UniqueName: \"kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.501333 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.528207 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-2z5jj"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.529980 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.539856 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxhmh\" (UniqueName: \"kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh\") pod \"barbican-db-create-gqbzb\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.566166 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-2z5jj"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.603307 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.603380 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.603505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lj8k\" (UniqueName: \"kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.603524 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt7c6\" (UniqueName: \"kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.606287 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.617158 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-9kgrw"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.623677 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.625379 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.626284 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lj8k\" (UniqueName: \"kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k\") pod \"barbican-f95e-account-create-update-f4h7x\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.654721 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9kgrw"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.705936 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt7c6\" (UniqueName: \"kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.706086 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.706151 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.706198 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hrzp\" (UniqueName: \"kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.707358 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.744158 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-5dfb-account-create-update-txwn7"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.745241 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5dfb-account-create-update-txwn7"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.745316 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.756133 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt7c6\" (UniqueName: \"kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6\") pod \"cloudkitty-db-create-2z5jj\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.756296 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.768199 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.808064 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hrzp\" (UniqueName: \"kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.808161 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.808235 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxb4m\" (UniqueName: \"kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.808316 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.809176 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.836159 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-2a41-account-create-update-qz7zm"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.837383 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.843353 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.859858 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hrzp\" (UniqueName: \"kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp\") pod \"cinder-db-create-9kgrw\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.870374 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2a41-account-create-update-qz7zm"] Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.912414 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxb4m\" (UniqueName: \"kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.913081 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.913393 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.916324 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:13 crc kubenswrapper[4768]: I1203 16:40:13.944565 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxb4m\" (UniqueName: \"kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m\") pod \"cinder-5dfb-account-create-update-txwn7\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.003294 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-6j9c8"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.004543 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"cb4550298fdf255df53c933ca9ca1e69275fa7e26849648dec59f0faec44f1a1"} Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.004634 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.010809 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.011125 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-srlwn" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.011288 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.011338 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.011407 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.014079 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.014194 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcvbj\" (UniqueName: \"kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.047906 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerStarted","Data":"c3afe68f2da1af9f8f3d7b6df856457d14ce22955937677e7224e06320891556"} Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.108032 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-d268s"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.122328 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.131185 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-6j9c8"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.134421 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.134467 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.134540 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.134581 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcvbj\" (UniqueName: \"kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.134639 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzdnb\" (UniqueName: \"kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.135773 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.153221 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-d268s"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.162001 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-8124-account-create-update-n4jv6"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.162749 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.163672 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.179087 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.193046 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcvbj\" (UniqueName: \"kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj\") pod \"cloudkitty-2a41-account-create-update-qz7zm\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.238822 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.238989 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mx2zm\" (UniqueName: \"kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.239046 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.239110 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzdnb\" (UniqueName: \"kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.239178 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.250399 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8124-account-create-update-n4jv6"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.257682 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.267213 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc 
kubenswrapper[4768]: I1203 16:40:14.296562 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzdnb\" (UniqueName: \"kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb\") pod \"keystone-db-sync-6j9c8\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") " pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.352907 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mx2zm\" (UniqueName: \"kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.353156 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.353184 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.353208 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6mwz\" (UniqueName: \"kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.359421 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-f95e-account-create-update-f4h7x"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.360808 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.379974 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-6j9c8" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.418462 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mx2zm\" (UniqueName: \"kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm\") pod \"neutron-db-create-d268s\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.421394 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-gqbzb"] Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.456995 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.457380 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6mwz\" (UniqueName: \"kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.458507 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.484261 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-d268s" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.493727 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.521094 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6mwz\" (UniqueName: \"kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz\") pod \"neutron-8124-account-create-update-n4jv6\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.759008 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-2z5jj"] Dec 03 16:40:14 crc kubenswrapper[4768]: W1203 16:40:14.769590 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f37bfbc_3d6a_4bc2_be68_bdbd1e6b2129.slice/crio-fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead WatchSource:0}: Error finding container fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead: Status 404 returned error can't find the container with id fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead Dec 03 16:40:14 crc kubenswrapper[4768]: I1203 16:40:14.794019 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.007834 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9kgrw"] Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.087321 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9kgrw" event={"ID":"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97","Type":"ContainerStarted","Data":"961fee84062679503ea7f748713755d0e7b4757f89a7f11b40b56c6ee9e6db64"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.091856 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gqbzb" event={"ID":"f14793fd-c763-44bd-b629-3426b1ccc605","Type":"ContainerStarted","Data":"9dce5aa35f53cfc163fb2d3386bd311e936006217409997feac77cd992bdee7c"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.100867 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f95e-account-create-update-f4h7x" event={"ID":"bcf11438-066f-4718-9f14-d19c5d998c5a","Type":"ContainerStarted","Data":"ce58dbd66c7da0fac5ca83d7a95f64b1353e2b6b22bf716ebc654bf20a1beac2"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.101187 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f95e-account-create-update-f4h7x" event={"ID":"bcf11438-066f-4718-9f14-d19c5d998c5a","Type":"ContainerStarted","Data":"043297d7228afab7d0f6e30d62a430e75edf4bc1d07fb319b10ce3ce74a5db63"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.107314 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"b9001f4ae0faa2e8a66296e01848e51ca90135105597bc6a58d278cb4106375b"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.107365 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"e7a9e4882f025b46b004c0df9ae41462004fe5d18db3dc41a1dbc0733ddb5689"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.112633 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-2z5jj" event={"ID":"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129","Type":"ContainerStarted","Data":"fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead"} Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.118981 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-5dfb-account-create-update-txwn7"] Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.119046 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-gqbzb" podStartSLOduration=2.119027749 podStartE2EDuration="2.119027749s" podCreationTimestamp="2025-12-03 16:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:15.106667267 +0000 UTC m=+1312.026003690" watchObservedRunningTime="2025-12-03 16:40:15.119027749 +0000 UTC m=+1312.038364172" Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.137860 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-f95e-account-create-update-f4h7x" podStartSLOduration=2.137840284 podStartE2EDuration="2.137840284s" podCreationTimestamp="2025-12-03 16:40:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:15.12204052 +0000 UTC m=+1312.041376943" watchObservedRunningTime="2025-12-03 16:40:15.137840284 +0000 UTC m=+1312.057176707" Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.938197 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-d268s"] Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.965849 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-2a41-account-create-update-qz7zm"] Dec 03 16:40:15 crc kubenswrapper[4768]: I1203 16:40:15.991719 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-6j9c8"] Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.072551 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-8124-account-create-update-n4jv6"] Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.138199 4768 generic.go:334] "Generic (PLEG): container finished" podID="b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" containerID="08cdb5aeff4008b72e86d49c18e4a9d4e99fd4c474e123b5f47954964cbf8053" exitCode=0 Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.138770 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9kgrw" event={"ID":"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97","Type":"ContainerDied","Data":"08cdb5aeff4008b72e86d49c18e4a9d4e99fd4c474e123b5f47954964cbf8053"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.140826 4768 generic.go:334] "Generic (PLEG): container finished" podID="f14793fd-c763-44bd-b629-3426b1ccc605" containerID="5c458684187373aa1d4e334cd9c7945cb719757aec798b5c74415d02f4bf83ee" exitCode=0 Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.140878 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gqbzb" event={"ID":"f14793fd-c763-44bd-b629-3426b1ccc605","Type":"ContainerDied","Data":"5c458684187373aa1d4e334cd9c7945cb719757aec798b5c74415d02f4bf83ee"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.142471 4768 generic.go:334] "Generic (PLEG): container finished" podID="bcf11438-066f-4718-9f14-d19c5d998c5a" containerID="ce58dbd66c7da0fac5ca83d7a95f64b1353e2b6b22bf716ebc654bf20a1beac2" exitCode=0 Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.142504 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f95e-account-create-update-f4h7x" event={"ID":"bcf11438-066f-4718-9f14-d19c5d998c5a","Type":"ContainerDied","Data":"ce58dbd66c7da0fac5ca83d7a95f64b1353e2b6b22bf716ebc654bf20a1beac2"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.155726 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"e74ffc6588b30a5f4aba743961dd5fdde96d5e50d21a3e29d1aebc02811b500d"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.157088 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" event={"ID":"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5","Type":"ContainerStarted","Data":"dbf5298285efd649f7bbd3062ce3e93ebf6584fc8b0f28b6d791e8cc2994afa2"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.159651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6j9c8" event={"ID":"2a7a3873-e2cc-41e3-9151-715913ade3b2","Type":"ContainerStarted","Data":"cb018027c6383983c1983e140a7b314c4214b20bc4a00d8bbc2d24d878b00237"} Dec 03 16:40:16 
crc kubenswrapper[4768]: I1203 16:40:16.162368 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8124-account-create-update-n4jv6" event={"ID":"31ba5ee1-82b2-4095-a92a-d3f5cd49482c","Type":"ContainerStarted","Data":"5503e600c4644fd48f5614a130ca70da153bded2046bad958bc5df8638435f1a"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.163506 4768 generic.go:334] "Generic (PLEG): container finished" podID="d4e9447e-4441-4183-9511-4780a1af50d4" containerID="0638f152b05881f5b84702ffc7c29da99a2318d9a1e0500beed153f735980ead" exitCode=0 Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.163566 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5dfb-account-create-update-txwn7" event={"ID":"d4e9447e-4441-4183-9511-4780a1af50d4","Type":"ContainerDied","Data":"0638f152b05881f5b84702ffc7c29da99a2318d9a1e0500beed153f735980ead"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.163678 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5dfb-account-create-update-txwn7" event={"ID":"d4e9447e-4441-4183-9511-4780a1af50d4","Type":"ContainerStarted","Data":"5635ffe34a32a1a772c15d511c225a8310b33f91e46467b99e2b46c44b8ce1c6"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.166388 4768 generic.go:334] "Generic (PLEG): container finished" podID="5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" containerID="528926e7c28d646c5d65cdf6d040ca17652d74ecadaff72ebeb8f3f23dd7b279" exitCode=0 Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.166561 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-2z5jj" event={"ID":"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129","Type":"ContainerDied","Data":"528926e7c28d646c5d65cdf6d040ca17652d74ecadaff72ebeb8f3f23dd7b279"} Dec 03 16:40:16 crc kubenswrapper[4768]: I1203 16:40:16.168017 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-d268s" event={"ID":"4da4c050-3db3-42cc-9b36-e65917c8b977","Type":"ContainerStarted","Data":"a6256efe668c97f5c664d006fb201b3968e0847447f72a7084b911a299d9c532"} Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.179713 4768 generic.go:334] "Generic (PLEG): container finished" podID="4da4c050-3db3-42cc-9b36-e65917c8b977" containerID="9e7afe659c82eac06ee9678c1cf3d5d00652b9c957a9b9f70940eeb8cb72e7f5" exitCode=0 Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.179786 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-d268s" event={"ID":"4da4c050-3db3-42cc-9b36-e65917c8b977","Type":"ContainerDied","Data":"9e7afe659c82eac06ee9678c1cf3d5d00652b9c957a9b9f70940eeb8cb72e7f5"} Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.184406 4768 generic.go:334] "Generic (PLEG): container finished" podID="bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" containerID="3b06e68d57d4a33409a06a905f8b8c6979ab4a98da40194e7fb1dc86da094644" exitCode=0 Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.184471 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" event={"ID":"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5","Type":"ContainerDied","Data":"3b06e68d57d4a33409a06a905f8b8c6979ab4a98da40194e7fb1dc86da094644"} Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.186434 4768 generic.go:334] "Generic (PLEG): container finished" podID="31ba5ee1-82b2-4095-a92a-d3f5cd49482c" containerID="5b343baf0a51bb146381fa15c64630f494bdc03631eb71d9f37a17518745dd77" exitCode=0 Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 
16:40:17.186496 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8124-account-create-update-n4jv6" event={"ID":"31ba5ee1-82b2-4095-a92a-d3f5cd49482c","Type":"ContainerDied","Data":"5b343baf0a51bb146381fa15c64630f494bdc03631eb71d9f37a17518745dd77"} Dec 03 16:40:17 crc kubenswrapper[4768]: I1203 16:40:17.748822 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="5c0ad451-c513-4f94-ac08-aaa2c7df9ae8" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 03 16:40:19 crc kubenswrapper[4768]: I1203 16:40:19.212887 4768 generic.go:334] "Generic (PLEG): container finished" podID="98a2cac0-c468-421c-8acd-b7f7e3b471ea" containerID="c3afe68f2da1af9f8f3d7b6df856457d14ce22955937677e7224e06320891556" exitCode=0 Dec 03 16:40:19 crc kubenswrapper[4768]: I1203 16:40:19.212936 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerDied","Data":"c3afe68f2da1af9f8f3d7b6df856457d14ce22955937677e7224e06320891556"} Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.029054 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.029357 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:40:26 crc kubenswrapper[4768]: E1203 16:40:26.755552 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 03 16:40:26 crc kubenswrapper[4768]: E1203 16:40:26.756038 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d4fnp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-zqcnl_openstack(495fd475-f8cd-4fd9-86d4-cdf7765e7ad6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:40:26 crc kubenswrapper[4768]: E1203 16:40:26.757284 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-zqcnl" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.874392 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.878526 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.884658 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.889792 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.895894 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-d268s" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915802 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts\") pod \"4da4c050-3db3-42cc-9b36-e65917c8b977\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915850 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mx2zm\" (UniqueName: \"kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm\") pod \"4da4c050-3db3-42cc-9b36-e65917c8b977\" (UID: \"4da4c050-3db3-42cc-9b36-e65917c8b977\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915874 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts\") pod \"bcf11438-066f-4718-9f14-d19c5d998c5a\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915902 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxhmh\" (UniqueName: \"kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh\") pod \"f14793fd-c763-44bd-b629-3426b1ccc605\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915930 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts\") pod \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.915995 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts\") pod \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916034 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lj8k\" (UniqueName: \"kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k\") pod \"bcf11438-066f-4718-9f14-d19c5d998c5a\" (UID: \"bcf11438-066f-4718-9f14-d19c5d998c5a\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916096 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hrzp\" (UniqueName: \"kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp\") pod \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\" (UID: \"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916139 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt7c6\" (UniqueName: \"kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6\") pod \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\" (UID: \"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916180 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts\") pod \"f14793fd-c763-44bd-b629-3426b1ccc605\" (UID: \"f14793fd-c763-44bd-b629-3426b1ccc605\") " Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916340 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4da4c050-3db3-42cc-9b36-e65917c8b977" (UID: "4da4c050-3db3-42cc-9b36-e65917c8b977"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916578 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4da4c050-3db3-42cc-9b36-e65917c8b977-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.916697 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" (UID: "5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.917037 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" (UID: "b1c3d427-12ff-4e36-bfaf-eebb41eb2c97"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.917373 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bcf11438-066f-4718-9f14-d19c5d998c5a" (UID: "bcf11438-066f-4718-9f14-d19c5d998c5a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.935271 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh" (OuterVolumeSpecName: "kube-api-access-wxhmh") pod "f14793fd-c763-44bd-b629-3426b1ccc605" (UID: "f14793fd-c763-44bd-b629-3426b1ccc605"). InnerVolumeSpecName "kube-api-access-wxhmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.935345 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6" (OuterVolumeSpecName: "kube-api-access-jt7c6") pod "5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" (UID: "5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129"). InnerVolumeSpecName "kube-api-access-jt7c6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.936852 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.941872 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f14793fd-c763-44bd-b629-3426b1ccc605" (UID: "f14793fd-c763-44bd-b629-3426b1ccc605"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.943979 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k" (OuterVolumeSpecName: "kube-api-access-2lj8k") pod "bcf11438-066f-4718-9f14-d19c5d998c5a" (UID: "bcf11438-066f-4718-9f14-d19c5d998c5a"). InnerVolumeSpecName "kube-api-access-2lj8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.944098 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm" (OuterVolumeSpecName: "kube-api-access-mx2zm") pod "4da4c050-3db3-42cc-9b36-e65917c8b977" (UID: "4da4c050-3db3-42cc-9b36-e65917c8b977"). InnerVolumeSpecName "kube-api-access-mx2zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:26 crc kubenswrapper[4768]: I1203 16:40:26.982923 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp" (OuterVolumeSpecName: "kube-api-access-8hrzp") pod "b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" (UID: "b1c3d427-12ff-4e36-bfaf-eebb41eb2c97"). InnerVolumeSpecName "kube-api-access-8hrzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.018557 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts\") pod \"d4e9447e-4441-4183-9511-4780a1af50d4\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.018637 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxb4m\" (UniqueName: \"kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m\") pod \"d4e9447e-4441-4183-9511-4780a1af50d4\" (UID: \"d4e9447e-4441-4183-9511-4780a1af50d4\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.019893 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lj8k\" (UniqueName: \"kubernetes.io/projected/bcf11438-066f-4718-9f14-d19c5d998c5a-kube-api-access-2lj8k\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.019926 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hrzp\" (UniqueName: \"kubernetes.io/projected/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-kube-api-access-8hrzp\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.019936 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt7c6\" (UniqueName: \"kubernetes.io/projected/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-kube-api-access-jt7c6\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020004 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f14793fd-c763-44bd-b629-3426b1ccc605-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020019 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mx2zm\" (UniqueName: \"kubernetes.io/projected/4da4c050-3db3-42cc-9b36-e65917c8b977-kube-api-access-mx2zm\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020029 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bcf11438-066f-4718-9f14-d19c5d998c5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020038 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxhmh\" (UniqueName: \"kubernetes.io/projected/f14793fd-c763-44bd-b629-3426b1ccc605-kube-api-access-wxhmh\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020047 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020059 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.020130 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"d4e9447e-4441-4183-9511-4780a1af50d4" (UID: "d4e9447e-4441-4183-9511-4780a1af50d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.021669 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.030539 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m" (OuterVolumeSpecName: "kube-api-access-wxb4m") pod "d4e9447e-4441-4183-9511-4780a1af50d4" (UID: "d4e9447e-4441-4183-9511-4780a1af50d4"). InnerVolumeSpecName "kube-api-access-wxb4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.032366 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.123293 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts\") pod \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.123368 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6mwz\" (UniqueName: \"kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz\") pod \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.123392 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcvbj\" (UniqueName: \"kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj\") pod \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\" (UID: \"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.123549 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts\") pod \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\" (UID: \"31ba5ee1-82b2-4095-a92a-d3f5cd49482c\") " Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.123889 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" (UID: "bc5f87cd-fa59-4074-b87c-3e3f5760ddb5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.124310 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31ba5ee1-82b2-4095-a92a-d3f5cd49482c" (UID: "31ba5ee1-82b2-4095-a92a-d3f5cd49482c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.124472 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.124500 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e9447e-4441-4183-9511-4780a1af50d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.124513 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxb4m\" (UniqueName: \"kubernetes.io/projected/d4e9447e-4441-4183-9511-4780a1af50d4-kube-api-access-wxb4m\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.124526 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.126381 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz" (OuterVolumeSpecName: "kube-api-access-z6mwz") pod "31ba5ee1-82b2-4095-a92a-d3f5cd49482c" (UID: "31ba5ee1-82b2-4095-a92a-d3f5cd49482c"). InnerVolumeSpecName "kube-api-access-z6mwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.126951 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj" (OuterVolumeSpecName: "kube-api-access-wcvbj") pod "bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" (UID: "bc5f87cd-fa59-4074-b87c-3e3f5760ddb5"). InnerVolumeSpecName "kube-api-access-wcvbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.226181 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6mwz\" (UniqueName: \"kubernetes.io/projected/31ba5ee1-82b2-4095-a92a-d3f5cd49482c-kube-api-access-z6mwz\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.226217 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcvbj\" (UniqueName: \"kubernetes.io/projected/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5-kube-api-access-wcvbj\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.296096 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9kgrw" event={"ID":"b1c3d427-12ff-4e36-bfaf-eebb41eb2c97","Type":"ContainerDied","Data":"961fee84062679503ea7f748713755d0e7b4757f89a7f11b40b56c6ee9e6db64"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.296135 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="961fee84062679503ea7f748713755d0e7b4757f89a7f11b40b56c6ee9e6db64" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.296196 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-9kgrw" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.305435 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-gqbzb" event={"ID":"f14793fd-c763-44bd-b629-3426b1ccc605","Type":"ContainerDied","Data":"9dce5aa35f53cfc163fb2d3386bd311e936006217409997feac77cd992bdee7c"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.305462 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dce5aa35f53cfc163fb2d3386bd311e936006217409997feac77cd992bdee7c" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.305488 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-gqbzb" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.318456 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-f95e-account-create-update-f4h7x" event={"ID":"bcf11438-066f-4718-9f14-d19c5d998c5a","Type":"ContainerDied","Data":"043297d7228afab7d0f6e30d62a430e75edf4bc1d07fb319b10ce3ce74a5db63"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.318490 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="043297d7228afab7d0f6e30d62a430e75edf4bc1d07fb319b10ce3ce74a5db63" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.318550 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-f95e-account-create-update-f4h7x" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.325202 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.325337 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-2a41-account-create-update-qz7zm" event={"ID":"bc5f87cd-fa59-4074-b87c-3e3f5760ddb5","Type":"ContainerDied","Data":"dbf5298285efd649f7bbd3062ce3e93ebf6584fc8b0f28b6d791e8cc2994afa2"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.325413 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbf5298285efd649f7bbd3062ce3e93ebf6584fc8b0f28b6d791e8cc2994afa2" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.331308 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-5dfb-account-create-update-txwn7" event={"ID":"d4e9447e-4441-4183-9511-4780a1af50d4","Type":"ContainerDied","Data":"5635ffe34a32a1a772c15d511c225a8310b33f91e46467b99e2b46c44b8ce1c6"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.331340 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-5dfb-account-create-update-txwn7" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.331346 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5635ffe34a32a1a772c15d511c225a8310b33f91e46467b99e2b46c44b8ce1c6" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.333209 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-2z5jj" event={"ID":"5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129","Type":"ContainerDied","Data":"fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.333230 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe41803191bc74f6968815dca678225ea3eb7e6389defc95f5f3f95bb9f1bead" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.333374 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-2z5jj" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.335074 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-8124-account-create-update-n4jv6" event={"ID":"31ba5ee1-82b2-4095-a92a-d3f5cd49482c","Type":"ContainerDied","Data":"5503e600c4644fd48f5614a130ca70da153bded2046bad958bc5df8638435f1a"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.335109 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5503e600c4644fd48f5614a130ca70da153bded2046bad958bc5df8638435f1a" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.335168 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-8124-account-create-update-n4jv6" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.350177 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-d268s" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.350270 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-d268s" event={"ID":"4da4c050-3db3-42cc-9b36-e65917c8b977","Type":"ContainerDied","Data":"a6256efe668c97f5c664d006fb201b3968e0847447f72a7084b911a299d9c532"} Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.350329 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6256efe668c97f5c664d006fb201b3968e0847447f72a7084b911a299d9c532" Dec 03 16:40:27 crc kubenswrapper[4768]: E1203 16:40:27.357037 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-zqcnl" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" Dec 03 16:40:27 crc kubenswrapper[4768]: I1203 16:40:27.687686 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 03 16:40:30 crc kubenswrapper[4768]: E1203 16:40:30.135579 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Dec 03 16:40:30 crc kubenswrapper[4768]: E1203 16:40:30.136073 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hzdnb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-6j9c8_openstack(2a7a3873-e2cc-41e3-9151-715913ade3b2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:40:30 crc 
kubenswrapper[4768]: E1203 16:40:30.137257 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/keystone-db-sync-6j9c8" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" Dec 03 16:40:30 crc kubenswrapper[4768]: I1203 16:40:30.378747 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerStarted","Data":"0c4447208f9b9d62fe55b00e2fdac74977e21d1f680c8af9eab4eb76f3324233"} Dec 03 16:40:30 crc kubenswrapper[4768]: E1203 16:40:30.380497 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="openstack/keystone-db-sync-6j9c8" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" Dec 03 16:40:31 crc kubenswrapper[4768]: I1203 16:40:31.390679 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"f338bec0b07905eee897144d14c3a3ab51f96741124a0a90b3c6ded3de5d3f97"} Dec 03 16:40:31 crc kubenswrapper[4768]: I1203 16:40:31.391068 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"86c97ade7a2d43408ab5b318efe498ea0db7a5c6604e08e8f160725a04f5abfa"} Dec 03 16:40:32 crc kubenswrapper[4768]: I1203 16:40:32.423299 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"0fe13f1232e3d0c117495309aee8d821fd23ebcf2b2bc9ab669317350cb83cff"} Dec 03 16:40:32 crc kubenswrapper[4768]: I1203 16:40:32.423654 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"5b11e013561710680c477854c782747c712a33d5537055a9139f7c109892028a"} Dec 03 16:40:33 crc kubenswrapper[4768]: I1203 16:40:33.436050 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerStarted","Data":"6d446340ecdf44b0cf83eecc631fae4261c0580368c904d86da7674994406c48"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.449267 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"98a2cac0-c468-421c-8acd-b7f7e3b471ea","Type":"ContainerStarted","Data":"52daff43733b15196043a1e1fbec5e477c1eba44f246006246c362e8184de85c"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.456304 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"5eb680369a2a184912feaddcb68ac017e7bb90f46f9c8b68a5d1904e39447bf1"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.456338 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"babe9d39562f2d9b1d4804406aa33e73ae80e13ed3d2255527bae9d0babcd38b"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.456346 4768 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"5a32c224ea24e30219a83cc4ca14f54d9f80da420fc0d7beb33ed18b96c88062"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.456356 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"67240026d3a42478fa30aaeb0c415bf388eeaad94e63d50408f5036fef8b91e0"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.456364 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"6f515a987673a01784f9e6e2a061a3ffd907ae636a962e017ae75fedb3f74f8a"} Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.484683 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=26.484669625 podStartE2EDuration="26.484669625s" podCreationTimestamp="2025-12-03 16:40:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:34.482167898 +0000 UTC m=+1331.401504331" watchObservedRunningTime="2025-12-03 16:40:34.484669625 +0000 UTC m=+1331.404006048" Dec 03 16:40:34 crc kubenswrapper[4768]: I1203 16:40:34.585395 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.486696 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"30d943d49a29c1986d021ea5dbb545a5563e615c0de322c75f0a76c7fe0013e5"} Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.487812 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"147901f2-6caa-4983-8e45-7e938cd9f36b","Type":"ContainerStarted","Data":"a581d7824ad8802476a43b5a3575a185938c1a42441851231649ad3c8b3d99e8"} Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.534824 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.136708938 podStartE2EDuration="57.534787727s" podCreationTimestamp="2025-12-03 16:39:38 +0000 UTC" firstStartedPulling="2025-12-03 16:40:11.925284058 +0000 UTC m=+1308.844620471" lastFinishedPulling="2025-12-03 16:40:33.323362827 +0000 UTC m=+1330.242699260" observedRunningTime="2025-12-03 16:40:35.527570724 +0000 UTC m=+1332.446907167" watchObservedRunningTime="2025-12-03 16:40:35.534787727 +0000 UTC m=+1332.454124210" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.821469 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"] Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.822318 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823133 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823202 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" 
containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823252 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823306 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4da4c050-3db3-42cc-9b36-e65917c8b977" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823350 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4da4c050-3db3-42cc-9b36-e65917c8b977" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823399 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ba5ee1-82b2-4095-a92a-d3f5cd49482c" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823443 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ba5ee1-82b2-4095-a92a-d3f5cd49482c" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823500 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e9447e-4441-4183-9511-4780a1af50d4" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823549 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e9447e-4441-4183-9511-4780a1af50d4" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823623 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f14793fd-c763-44bd-b629-3426b1ccc605" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823681 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f14793fd-c763-44bd-b629-3426b1ccc605" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823743 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcf11438-066f-4718-9f14-d19c5d998c5a" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823789 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcf11438-066f-4718-9f14-d19c5d998c5a" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: E1203 16:40:35.823841 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.823909 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824153 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="31ba5ee1-82b2-4095-a92a-d3f5cd49482c" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824212 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824268 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e9447e-4441-4183-9511-4780a1af50d4" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824318 4768 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="bcf11438-066f-4718-9f14-d19c5d998c5a" containerName="mariadb-account-create-update" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824370 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f14793fd-c763-44bd-b629-3426b1ccc605" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824421 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824474 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4da4c050-3db3-42cc-9b36-e65917c8b977" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.824527 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" containerName="mariadb-database-create" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.825559 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.828714 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.870651 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"] Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.948746 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.948855 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxrzx\" (UniqueName: \"kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.948900 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.948935 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.949025 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:40:35 crc kubenswrapper[4768]: I1203 16:40:35.949067 4768 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050336 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050410 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxrzx\" (UniqueName: \"kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050433 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050451 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050497 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.050521 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.051393 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.051389 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.051668 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.051881 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.052003 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.075010 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxrzx\" (UniqueName: \"kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx\") pod \"dnsmasq-dns-764c5664d7-mfh94\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.141833 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:36 crc kubenswrapper[4768]: I1203 16:40:36.668493 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"]
Dec 03 16:40:37 crc kubenswrapper[4768]: I1203 16:40:37.501675 4768 generic.go:334] "Generic (PLEG): container finished" podID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerID="6a57611ccdd2cc8fd6bb95b3690b756401eb27be13412a3950e12345c202d39c" exitCode=0
Dec 03 16:40:37 crc kubenswrapper[4768]: I1203 16:40:37.501770 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" event={"ID":"c84dbd36-bce8-4dec-a0f7-56ad2b103209","Type":"ContainerDied","Data":"6a57611ccdd2cc8fd6bb95b3690b756401eb27be13412a3950e12345c202d39c"}
Dec 03 16:40:37 crc kubenswrapper[4768]: I1203 16:40:37.501995 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" event={"ID":"c84dbd36-bce8-4dec-a0f7-56ad2b103209","Type":"ContainerStarted","Data":"ded71fdc2218f62e0e90ec45daf8b460aaa5aa1a3e3df71db2111b0996bc0fd9"}
Dec 03 16:40:38 crc kubenswrapper[4768]: I1203 16:40:38.512853 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" event={"ID":"c84dbd36-bce8-4dec-a0f7-56ad2b103209","Type":"ContainerStarted","Data":"026d13935daab3d77076152d64aebf5aa64572576bea932daa7051413bc03976"}
Dec 03 16:40:38 crc kubenswrapper[4768]: I1203 16:40:38.513259 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:39 crc kubenswrapper[4768]: I1203 16:40:39.584951 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:39 crc kubenswrapper[4768]: I1203 16:40:39.595110 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:39 crc kubenswrapper[4768]: I1203 16:40:39.648064 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podStartSLOduration=4.648032623 podStartE2EDuration="4.648032623s" podCreationTimestamp="2025-12-03 16:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:38.543287364 +0000 UTC m=+1335.462623787" watchObservedRunningTime="2025-12-03 16:40:39.648032623 +0000 UTC m=+1336.567369086"
Dec 03 16:40:40 crc kubenswrapper[4768]: I1203 16:40:40.548940 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 03 16:40:42 crc kubenswrapper[4768]: I1203 16:40:42.561885 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zqcnl" event={"ID":"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6","Type":"ContainerStarted","Data":"d50da033304a43f898c6f04a390690a054e0a05f712a7c3a1c7897acc2425d43"}
Dec 03 16:40:45 crc kubenswrapper[4768]: I1203 16:40:45.561960 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-zqcnl" podStartSLOduration=6.269470311 podStartE2EDuration="39.561942957s" podCreationTimestamp="2025-12-03 16:40:06 +0000 UTC" firstStartedPulling="2025-12-03 16:40:08.014837548 +0000 UTC m=+1304.934173961" lastFinishedPulling="2025-12-03 16:40:41.307310174 +0000 UTC m=+1338.226646607" observedRunningTime="2025-12-03 16:40:42.586751816 +0000 UTC m=+1339.506088259" watchObservedRunningTime="2025-12-03 16:40:45.561942957 +0000 UTC m=+1342.481279380"
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.143647 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.222066 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"]
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.225244 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-vlllk" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="dnsmasq-dns" containerID="cri-o://c3c9d658027b470b635e86d7e62452e77fcb774e7a473166f03acadf73b8f4e6" gracePeriod=10
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.605218 4768 generic.go:334] "Generic (PLEG): container finished" podID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerID="c3c9d658027b470b635e86d7e62452e77fcb774e7a473166f03acadf73b8f4e6" exitCode=0
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.605366 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-vlllk" event={"ID":"ec1d296a-9fca-4e6d-8463-fc948d519080","Type":"ContainerDied","Data":"c3c9d658027b470b635e86d7e62452e77fcb774e7a473166f03acadf73b8f4e6"}
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.607762 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6j9c8" event={"ID":"2a7a3873-e2cc-41e3-9151-715913ade3b2","Type":"ContainerStarted","Data":"cdc3d205bae4c7f868ae3fa4761b22816cd49c390585fab5ed2111cb0588e1a9"}
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.627835 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-6j9c8" podStartSLOduration=3.352796273 podStartE2EDuration="33.627816503s" podCreationTimestamp="2025-12-03 16:40:13 +0000 UTC" firstStartedPulling="2025-12-03 16:40:16.030429546 +0000 UTC m=+1312.949765969" lastFinishedPulling="2025-12-03 16:40:46.305449776 +0000 UTC m=+1343.224786199" observedRunningTime="2025-12-03 16:40:46.620856186 +0000 UTC m=+1343.540192609" watchObservedRunningTime="2025-12-03 16:40:46.627816503 +0000 UTC m=+1343.547152936"
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.721125 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-vlllk"
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.859352 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb\") pod \"ec1d296a-9fca-4e6d-8463-fc948d519080\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") "
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.859416 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc\") pod \"ec1d296a-9fca-4e6d-8463-fc948d519080\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") "
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.859438 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgbtr\" (UniqueName: \"kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr\") pod \"ec1d296a-9fca-4e6d-8463-fc948d519080\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") "
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.860363 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb\") pod \"ec1d296a-9fca-4e6d-8463-fc948d519080\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") "
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.860453 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config\") pod \"ec1d296a-9fca-4e6d-8463-fc948d519080\" (UID: \"ec1d296a-9fca-4e6d-8463-fc948d519080\") "
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.864475 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr" (OuterVolumeSpecName: "kube-api-access-pgbtr") pod "ec1d296a-9fca-4e6d-8463-fc948d519080" (UID: "ec1d296a-9fca-4e6d-8463-fc948d519080"). InnerVolumeSpecName "kube-api-access-pgbtr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.909339 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ec1d296a-9fca-4e6d-8463-fc948d519080" (UID: "ec1d296a-9fca-4e6d-8463-fc948d519080"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.910856 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ec1d296a-9fca-4e6d-8463-fc948d519080" (UID: "ec1d296a-9fca-4e6d-8463-fc948d519080"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.918398 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config" (OuterVolumeSpecName: "config") pod "ec1d296a-9fca-4e6d-8463-fc948d519080" (UID: "ec1d296a-9fca-4e6d-8463-fc948d519080"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.924137 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ec1d296a-9fca-4e6d-8463-fc948d519080" (UID: "ec1d296a-9fca-4e6d-8463-fc948d519080"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.962930 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.962964 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.962978 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgbtr\" (UniqueName: \"kubernetes.io/projected/ec1d296a-9fca-4e6d-8463-fc948d519080-kube-api-access-pgbtr\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.962988 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:46 crc kubenswrapper[4768]: I1203 16:40:46.962997 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec1d296a-9fca-4e6d-8463-fc948d519080-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.640170 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-vlllk" event={"ID":"ec1d296a-9fca-4e6d-8463-fc948d519080","Type":"ContainerDied","Data":"f0d7e2a21b256ef2a962d2d0a1d6d9a2275b897fa892f318b49962da02982ef6"}
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.641451 4768 scope.go:117] "RemoveContainer" containerID="c3c9d658027b470b635e86d7e62452e77fcb774e7a473166f03acadf73b8f4e6"
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.640245 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-vlllk"
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.669236 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"]
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.674248 4768 scope.go:117] "RemoveContainer" containerID="3d4feec0454a3b7dda5e04a16e4b8923bab55feae258d992d084d7237bc264b1"
Dec 03 16:40:47 crc kubenswrapper[4768]: I1203 16:40:47.679713 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-vlllk"]
Dec 03 16:40:49 crc kubenswrapper[4768]: I1203 16:40:49.541919 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" path="/var/lib/kubelet/pods/ec1d296a-9fca-4e6d-8463-fc948d519080/volumes"
Dec 03 16:40:49 crc kubenswrapper[4768]: I1203 16:40:49.665342 4768 generic.go:334] "Generic (PLEG): container finished" podID="2a7a3873-e2cc-41e3-9151-715913ade3b2" containerID="cdc3d205bae4c7f868ae3fa4761b22816cd49c390585fab5ed2111cb0588e1a9" exitCode=0
Dec 03 16:40:49 crc kubenswrapper[4768]: I1203 16:40:49.665841 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6j9c8" event={"ID":"2a7a3873-e2cc-41e3-9151-715913ade3b2","Type":"ContainerDied","Data":"cdc3d205bae4c7f868ae3fa4761b22816cd49c390585fab5ed2111cb0588e1a9"}
Dec 03 16:40:49 crc kubenswrapper[4768]: I1203 16:40:49.667522 4768 generic.go:334] "Generic (PLEG): container finished" podID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" containerID="d50da033304a43f898c6f04a390690a054e0a05f712a7c3a1c7897acc2425d43" exitCode=0
Dec 03 16:40:49 crc kubenswrapper[4768]: I1203 16:40:49.667553 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zqcnl" event={"ID":"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6","Type":"ContainerDied","Data":"d50da033304a43f898c6f04a390690a054e0a05f712a7c3a1c7897acc2425d43"}
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.052079 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-6j9c8"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.171449 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data\") pod \"2a7a3873-e2cc-41e3-9151-715913ade3b2\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.171514 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle\") pod \"2a7a3873-e2cc-41e3-9151-715913ade3b2\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.171575 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzdnb\" (UniqueName: \"kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb\") pod \"2a7a3873-e2cc-41e3-9151-715913ade3b2\" (UID: \"2a7a3873-e2cc-41e3-9151-715913ade3b2\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.188035 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb" (OuterVolumeSpecName: "kube-api-access-hzdnb") pod "2a7a3873-e2cc-41e3-9151-715913ade3b2" (UID: "2a7a3873-e2cc-41e3-9151-715913ade3b2"). InnerVolumeSpecName "kube-api-access-hzdnb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.201990 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2a7a3873-e2cc-41e3-9151-715913ade3b2" (UID: "2a7a3873-e2cc-41e3-9151-715913ade3b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.225267 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data" (OuterVolumeSpecName: "config-data") pod "2a7a3873-e2cc-41e3-9151-715913ade3b2" (UID: "2a7a3873-e2cc-41e3-9151-715913ade3b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.246352 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zqcnl"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.276762 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzdnb\" (UniqueName: \"kubernetes.io/projected/2a7a3873-e2cc-41e3-9151-715913ade3b2-kube-api-access-hzdnb\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.276794 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.276804 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2a7a3873-e2cc-41e3-9151-715913ade3b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.378214 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data\") pod \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.378309 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data\") pod \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.378545 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4fnp\" (UniqueName: \"kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp\") pod \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.378621 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle\") pod \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\" (UID: \"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6\") "
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.381328 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp" (OuterVolumeSpecName: "kube-api-access-d4fnp") pod "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" (UID: "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6"). InnerVolumeSpecName "kube-api-access-d4fnp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.382227 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" (UID: "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.405217 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" (UID: "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.429393 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data" (OuterVolumeSpecName: "config-data") pod "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" (UID: "495fd475-f8cd-4fd9-86d4-cdf7765e7ad6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.481329 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4fnp\" (UniqueName: \"kubernetes.io/projected/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-kube-api-access-d4fnp\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.481365 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.481379 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.481391 4768 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.691137 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-zqcnl" event={"ID":"495fd475-f8cd-4fd9-86d4-cdf7765e7ad6","Type":"ContainerDied","Data":"6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb"}
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.691177 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d63abc027f272c2c351e3b71095fa275aa5915870e1959a14a673689256b8fb"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.691908 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-zqcnl"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.693362 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-6j9c8" event={"ID":"2a7a3873-e2cc-41e3-9151-715913ade3b2","Type":"ContainerDied","Data":"cb018027c6383983c1983e140a7b314c4214b20bc4a00d8bbc2d24d878b00237"}
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.693383 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb018027c6383983c1983e140a7b314c4214b20bc4a00d8bbc2d24d878b00237"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.693433 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-6j9c8"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.991408 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"]
Dec 03 16:40:51 crc kubenswrapper[4768]: E1203 16:40:51.991814 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" containerName="keystone-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.991833 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" containerName="keystone-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: E1203 16:40:51.991843 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" containerName="glance-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.991849 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" containerName="glance-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: E1203 16:40:51.991868 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="init"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.991876 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="init"
Dec 03 16:40:51 crc kubenswrapper[4768]: E1203 16:40:51.991891 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="dnsmasq-dns"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.991897 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="dnsmasq-dns"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.992061 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec1d296a-9fca-4e6d-8463-fc948d519080" containerName="dnsmasq-dns"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.992103 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" containerName="glance-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.992118 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" containerName="keystone-db-sync"
Dec 03 16:40:51 crc kubenswrapper[4768]: I1203 16:40:51.993125 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.003691 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7g2qs"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.004986 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.008701 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.008922 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-srlwn"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.009952 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.013611 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7g2qs"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.014357 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.019382 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.030690 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192641 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192697 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192747 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192786 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192818 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192857 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192895 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192919 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2j6g\" (UniqueName: \"kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192959 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.192980 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvbf2\" (UniqueName: \"kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.193022 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.193042 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.237256 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-6fxl6"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.238603 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.250493 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-mftv7"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.252078 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.259445 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6fxl6"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.271107 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mftv7"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.282400 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.286932 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tgrnc"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.290783 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.290806 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.291045 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9hz64"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.291168 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296400 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296468 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2j6g\" (UniqueName: \"kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296523 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296552 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvbf2\" (UniqueName: \"kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296581 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296635 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296706 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.296735 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.297534 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.297579 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.297643 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.297702 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.298722 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.298724 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.299454 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.299507 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.302374 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.310293 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.311900 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.320301 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.321131 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.334900 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.337428 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2j6g\" (UniqueName: \"kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g\") pod \"keystone-bootstrap-7g2qs\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.343992 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvbf2\" (UniqueName: \"kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2\") pod \"dnsmasq-dns-5959f8865f-fghzs\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.347808 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fghzs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.366676 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.369619 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.371371 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.381301 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.388033 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.391189 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.397855 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.399900 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.399986 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b6jj\" (UniqueName: \"kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400010 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400067 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400100 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400138 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400154 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400178 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.400215 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65sdw\" (UniqueName: \"kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.406636 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-5vkb8"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.423326 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.439193 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.439764 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.440703 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5vkb8"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.442285 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mpc26"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503241 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503305 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503343 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503364 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpcnh\" (UniqueName: \"kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503390 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503427 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503446 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503477 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503499 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503564 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65sdw\" (UniqueName: \"kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503617 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503670 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503700 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503745 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b6jj\" (UniqueName: \"kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.503765 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.515644 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.516967 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.519635 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.526672 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.527387 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.528399 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-chllv"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.535403 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.535658 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.543960 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.577654 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65sdw\" (UniqueName: \"kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw\") pod \"cinder-db-sync-mftv7\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") " pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.579682 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.589307 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617141 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kxt2\" (UniqueName: \"kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617385 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617480 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617622 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpcnh\" (UniqueName: \"kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617731 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.617891 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628126 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.626064 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628351 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628375 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628523 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628545 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.628579 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.633199 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b6jj\" (UniqueName: \"kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj\") pod \"neutron-db-sync-6fxl6\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.636258 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.638951 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.650580 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.658137 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.664679 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-pxm2p"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.673272 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-pxm2p"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.687022 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.687259 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.687432 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.687974 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.691914 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-dr7j4"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.700390 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpcnh\" (UniqueName: \"kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh\") pod \"ceilometer-0\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " pod="openstack/ceilometer-0"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.700471 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-t54sm"]
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.725365 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t54sm"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.739731 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4zvwk"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.740165 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.748350 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.748437 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.748516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p"
Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.748551 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName:
\"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.748574 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752257 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xh9x\" (UniqueName: \"kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752397 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k72v8\" (UniqueName: \"kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752485 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752611 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg442\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752652 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752757 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752831 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752875 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.752967 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.753031 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.753103 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kxt2\" (UniqueName: \"kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.753135 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.753188 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.784530 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.784770 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.784993 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts\") pod 
\"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.787303 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.789055 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-pxm2p"] Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.804918 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.848550 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kxt2\" (UniqueName: \"kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2\") pod \"placement-db-sync-5vkb8\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.854899 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.854958 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.854975 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855017 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xh9x\" (UniqueName: \"kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855048 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k72v8\" (UniqueName: \"kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855091 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc 
kubenswrapper[4768]: I1203 16:40:52.855152 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg442\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855179 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855240 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855274 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855316 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855334 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855353 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855393 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.855896 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.856215 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" 
(UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.861365 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.862052 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.862344 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.862828 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.870757 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.874263 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.875137 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.877239 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-6fxl6" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.879141 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.891252 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.894146 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k72v8\" (UniqueName: \"kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8\") pod \"barbican-db-sync-t54sm\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.894293 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t54sm"] Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.909384 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg442\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442\") pod \"cloudkitty-db-sync-pxm2p\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:52 crc kubenswrapper[4768]: I1203 16:40:52.915316 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xh9x\" (UniqueName: \"kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x\") pod \"dnsmasq-dns-847c4cc679-chllv\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:52.996661 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:52.997368 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.041032 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.051872 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.053569 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.081984 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.103879 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t54sm" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.109473 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-5vkb8" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.156259 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.158025 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.160132 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.161012 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-tdxnd" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.164949 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.167660 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.167971 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9w6l\" (UniqueName: \"kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.168025 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.168153 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.168183 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.168270 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.168439 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 16:40:53 crc 
kubenswrapper[4768]: I1203 16:40:53.269575 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269722 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269741 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269762 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269800 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269821 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269839 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwg8w\" (UniqueName: \"kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269869 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.269889 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.270565 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.274315 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.275139 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.275516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.275568 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.275643 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9w6l\" (UniqueName: \"kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.275678 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.276217 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.276482 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" 
Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.295904 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9w6l\" (UniqueName: \"kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l\") pod \"dnsmasq-dns-785d8bcb8c-fkggx\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.350007 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378270 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378385 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378406 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwg8w\" (UniqueName: \"kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378457 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378542 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378585 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.378919 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.379356 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs\") pod 
\"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.380072 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.381916 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.384390 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.384434 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2d20f1f07bf733bd5c44955ae3cad3d4468693e76c899493d03847aaee02910e/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.385461 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.386675 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.395581 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwg8w\" (UniqueName: \"kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.448036 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.556136 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.572456 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.618261 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7g2qs"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.626112 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mftv7"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.728774 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-6fxl6"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.777782 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.811806 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.814960 4768 generic.go:334] "Generic (PLEG): container finished" podID="f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" containerID="5af3e100d72ad86a947b19872d6187818c03f7b08b8218e5ffdae5d5ab31eb25" exitCode=0 Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.819106 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fghzs" event={"ID":"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b","Type":"ContainerDied","Data":"5af3e100d72ad86a947b19872d6187818c03f7b08b8218e5ffdae5d5ab31eb25"} Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.819157 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fghzs" event={"ID":"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b","Type":"ContainerStarted","Data":"ae100a1b8db8f99ba0962719e996414feacc5c9d788371244b4b497aa83ada07"} Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.819277 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.822504 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mftv7" event={"ID":"8f19bd8f-a9f2-41de-b0f3-de08db42cf69","Type":"ContainerStarted","Data":"c94c494c1d325ff9d97a3bfd626a8b54487c9feab1ff4840f4008b5096d2513f"} Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.829351 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.833503 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fxl6" event={"ID":"f3088df5-8818-432a-997e-d6b6b2d7daca","Type":"ContainerStarted","Data":"8aba1c57c7566ec70b6000e3dc47774524063a76207da652be46513ea2871c8c"} Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.838607 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7g2qs" event={"ID":"e12efeef-87cb-48ae-8626-d6e02ca50b40","Type":"ContainerStarted","Data":"5b317409ad0c858c0c5025d5b90e0a6f3d5742e1a876a318ebce198c5bd2f483"} Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.851337 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915461 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915702 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915795 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwjtg\" (UniqueName: \"kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915862 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915906 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.915991 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: I1203 16:40:53.916027 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:53 crc kubenswrapper[4768]: E1203 16:40:53.964895 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf937f4ac_d4e2_4368_b0f3_ab36f8c0f39b.slice/crio-5af3e100d72ad86a947b19872d6187818c03f7b08b8218e5ffdae5d5ab31eb25.scope\": RecentStats: unable to find data in memory cache]" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021731 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021800 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwjtg\" (UniqueName: \"kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021837 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021866 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021907 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.021967 4768 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.022893 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.029284 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.031458 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.031817 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.043404 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.043438 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/47b06e618a5b6a9be0e4e66414c4e7ce54d0762cbbe4888f533ae97371202fb7/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.062745 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.095852 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"] Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.130622 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwjtg\" (UniqueName: \"kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: W1203 16:40:54.150964 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ce5b640_44a6_4924_9f9c_d39b9247c4b3.slice/crio-850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991 WatchSource:0}: Error finding container 850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991: Status 404 returned error can't find the container with id 850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991 Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.170853 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-5vkb8"] Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.184879 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: W1203 16:40:54.200995 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod449f6efb_981d_445a_b10a_a8d76f9d027d.slice/crio-5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb WatchSource:0}: Error finding container 5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb: Status 404 returned error can't find the container with id 5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.265341 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-t54sm"] Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.297375 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-pxm2p"] Dec 03 16:40:54 crc 
kubenswrapper[4768]: I1203 16:40:54.311274 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"] Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.428408 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.474200 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.536997 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fghzs" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642345 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642413 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642572 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642623 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvbf2\" (UniqueName: \"kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642713 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.642736 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc\") pod \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\" (UID: \"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b\") " Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.665760 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2" (OuterVolumeSpecName: "kube-api-access-kvbf2") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "kube-api-access-kvbf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.686735 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.690931 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.698310 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.699550 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.718261 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config" (OuterVolumeSpecName: "config") pod "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" (UID: "f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749364 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749423 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749436 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749446 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749457 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.749468 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvbf2\" (UniqueName: \"kubernetes.io/projected/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b-kube-api-access-kvbf2\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.931845 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t54sm" event={"ID":"5902e376-547e-485a-a963-0c3bc5c5cfe7","Type":"ContainerStarted","Data":"7e04f5e21773dbf9543cd6e6bc7563322ddb8aea304395fdcf7d5b1316de3db6"} Dec 03 16:40:54 crc kubenswrapper[4768]: I1203 16:40:54.975933 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fxl6" event={"ID":"f3088df5-8818-432a-997e-d6b6b2d7daca","Type":"ContainerStarted","Data":"4e855590d6d14758bee6f9fe4309487b31c6ec27c7271add7ea5f20efbb3de7e"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.019735 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerStarted","Data":"0dec4c1a164fb3ec14a53a84a68d6ed1e9a808779158323752ed4bd0b48eab34"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.025725 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-6fxl6" podStartSLOduration=3.025698147 podStartE2EDuration="3.025698147s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:55.019956923 +0000 UTC m=+1351.939293346" watchObservedRunningTime="2025-12-03 16:40:55.025698147 +0000 UTC m=+1351.945034570" Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.056655 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-pxm2p" event={"ID":"449f6efb-981d-445a-b10a-a8d76f9d027d","Type":"ContainerStarted","Data":"5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.067159 4768 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7g2qs" event={"ID":"e12efeef-87cb-48ae-8626-d6e02ca50b40","Type":"ContainerStarted","Data":"1df1242371a73fb6c9edfea3ec7a2033e6c41cf783b6a3ac2603d23ebc68a712"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.120898 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.126580 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" event={"ID":"e84712b7-2b2e-47b2-8be3-024675467757","Type":"ContainerStarted","Data":"d7a8edb2bc7d5199fc4a54a34f0d5249b2ba75697d4a37c37983ab62bb166a7f"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.155341 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7g2qs" podStartSLOduration=4.146576473 podStartE2EDuration="4.146576473s" podCreationTimestamp="2025-12-03 16:40:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:55.140267864 +0000 UTC m=+1352.059604287" watchObservedRunningTime="2025-12-03 16:40:55.146576473 +0000 UTC m=+1352.065912896" Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.190104 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5959f8865f-fghzs" Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.191677 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5959f8865f-fghzs" event={"ID":"f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b","Type":"ContainerDied","Data":"ae100a1b8db8f99ba0962719e996414feacc5c9d788371244b4b497aa83ada07"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.191732 4768 scope.go:117] "RemoveContainer" containerID="5af3e100d72ad86a947b19872d6187818c03f7b08b8218e5ffdae5d5ab31eb25" Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.214071 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.249834 4768 generic.go:334] "Generic (PLEG): container finished" podID="93351c15-e5c9-46b2-8b6c-e3faa003870b" containerID="9ec10f9ba42b02d74c74cdb766b2757dc3484b3a14c72d7256f6630aabb8c61a" exitCode=0 Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.249937 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-chllv" event={"ID":"93351c15-e5c9-46b2-8b6c-e3faa003870b","Type":"ContainerDied","Data":"9ec10f9ba42b02d74c74cdb766b2757dc3484b3a14c72d7256f6630aabb8c61a"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.250001 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-chllv" event={"ID":"93351c15-e5c9-46b2-8b6c-e3faa003870b","Type":"ContainerStarted","Data":"129511b21683cf95cf8d4217218822a6789fffc4841cf294b187034e3031f543"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.261543 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerStarted","Data":"2f3b4375edf721d2f3f14eebe76b94b6832c1153a67741f0f4dfe83b1c3b41c6"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.263035 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5vkb8" 
event={"ID":"5ce5b640-44a6-4924-9f9c-d39b9247c4b3","Type":"ContainerStarted","Data":"850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991"} Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.425445 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"] Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.456682 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5959f8865f-fghzs"] Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.469415 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.527900 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:40:55 crc kubenswrapper[4768]: W1203 16:40:55.626462 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67124cc3_c453_4e95_b380_8d94f7ca4d34.slice/crio-27080a913e1dfd89cf90da3156ea038115b7fc898be09cbd64e5b2ee62e774ff WatchSource:0}: Error finding container 27080a913e1dfd89cf90da3156ea038115b7fc898be09cbd64e5b2ee62e774ff: Status 404 returned error can't find the container with id 27080a913e1dfd89cf90da3156ea038115b7fc898be09cbd64e5b2ee62e774ff Dec 03 16:40:55 crc kubenswrapper[4768]: I1203 16:40:55.626547 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" path="/var/lib/kubelet/pods/f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b/volumes" Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.028642 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.029532 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.029633 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.046901 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.047012 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15" gracePeriod=600 Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.327976 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" 
containerID="8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15" exitCode=0 Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.328060 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15"} Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.328100 4768 scope.go:117] "RemoveContainer" containerID="5d2288acefb9ba0668d7ad6cd6de7be9ae141e8a037ddbecc4ef7c8eb139eec9" Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.367331 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerStarted","Data":"8909e7290b5bbf00b005fb4a0d305f83524e34426609b4d5b4963f38da5f62bc"} Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.372420 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerStarted","Data":"27080a913e1dfd89cf90da3156ea038115b7fc898be09cbd64e5b2ee62e774ff"} Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.378396 4768 generic.go:334] "Generic (PLEG): container finished" podID="e84712b7-2b2e-47b2-8be3-024675467757" containerID="2e496b7b6a02d1629e50944245f9d436b45b778806abe0d021a61888e8a998f3" exitCode=0 Dec 03 16:40:56 crc kubenswrapper[4768]: I1203 16:40:56.378459 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" event={"ID":"e84712b7-2b2e-47b2-8be3-024675467757","Type":"ContainerDied","Data":"2e496b7b6a02d1629e50944245f9d436b45b778806abe0d021a61888e8a998f3"} Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.351573 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.406551 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-847c4cc679-chllv" event={"ID":"93351c15-e5c9-46b2-8b6c-e3faa003870b","Type":"ContainerDied","Data":"129511b21683cf95cf8d4217218822a6789fffc4841cf294b187034e3031f543"} Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.406621 4768 scope.go:117] "RemoveContainer" containerID="9ec10f9ba42b02d74c74cdb766b2757dc3484b3a14c72d7256f6630aabb8c61a" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.406653 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-847c4cc679-chllv" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447646 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447728 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xh9x\" (UniqueName: \"kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447751 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447874 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447924 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.447963 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc\") pod \"93351c15-e5c9-46b2-8b6c-e3faa003870b\" (UID: \"93351c15-e5c9-46b2-8b6c-e3faa003870b\") " Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.464679 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x" (OuterVolumeSpecName: "kube-api-access-7xh9x") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "kube-api-access-7xh9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.481888 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.483175 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.489704 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.493836 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config" (OuterVolumeSpecName: "config") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.499095 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "93351c15-e5c9-46b2-8b6c-e3faa003870b" (UID: "93351c15-e5c9-46b2-8b6c-e3faa003870b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553751 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553795 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553805 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553819 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xh9x\" (UniqueName: \"kubernetes.io/projected/93351c15-e5c9-46b2-8b6c-e3faa003870b-kube-api-access-7xh9x\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553828 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.553837 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/93351c15-e5c9-46b2-8b6c-e3faa003870b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.764838 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"] Dec 03 16:40:57 crc kubenswrapper[4768]: I1203 16:40:57.777133 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-847c4cc679-chllv"] Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.474560 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" 
event={"ID":"e84712b7-2b2e-47b2-8be3-024675467757","Type":"ContainerStarted","Data":"d908d3735361129693c4b06e06e76e6073ba8fe017a790d36bec3d6a562684b8"} Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.484476 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"} Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.496467 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerStarted","Data":"f50b461751b2e922876d30cc54029ff73346bacbc729b32d498800127a80ab80"} Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.496650 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-log" containerID="cri-o://8909e7290b5bbf00b005fb4a0d305f83524e34426609b4d5b4963f38da5f62bc" gracePeriod=30 Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.496787 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-httpd" containerID="cri-o://f50b461751b2e922876d30cc54029ff73346bacbc729b32d498800127a80ab80" gracePeriod=30 Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.512951 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" podStartSLOduration=7.512936277 podStartE2EDuration="7.512936277s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:59.509869074 +0000 UTC m=+1356.429205507" watchObservedRunningTime="2025-12-03 16:40:59.512936277 +0000 UTC m=+1356.432272700" Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.515627 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerStarted","Data":"e2fd086ef5259ce39f9ce0430404214d1ef5d9135d175690ba6929f0ed480dbe"} Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.574044 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.574022167 podStartE2EDuration="7.574022167s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:40:59.564957274 +0000 UTC m=+1356.484293727" watchObservedRunningTime="2025-12-03 16:40:59.574022167 +0000 UTC m=+1356.493358590" Dec 03 16:40:59 crc kubenswrapper[4768]: I1203 16:40:59.583132 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93351c15-e5c9-46b2-8b6c-e3faa003870b" path="/var/lib/kubelet/pods/93351c15-e5c9-46b2-8b6c-e3faa003870b/volumes" Dec 03 16:41:00 crc kubenswrapper[4768]: I1203 16:41:00.537271 4768 generic.go:334] "Generic (PLEG): container finished" podID="509dcf45-89a1-4338-af4a-b024d0485487" containerID="f50b461751b2e922876d30cc54029ff73346bacbc729b32d498800127a80ab80" exitCode=143 Dec 03 16:41:00 crc kubenswrapper[4768]: I1203 16:41:00.537846 
4768 generic.go:334] "Generic (PLEG): container finished" podID="509dcf45-89a1-4338-af4a-b024d0485487" containerID="8909e7290b5bbf00b005fb4a0d305f83524e34426609b4d5b4963f38da5f62bc" exitCode=143 Dec 03 16:41:00 crc kubenswrapper[4768]: I1203 16:41:00.537352 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerDied","Data":"f50b461751b2e922876d30cc54029ff73346bacbc729b32d498800127a80ab80"} Dec 03 16:41:00 crc kubenswrapper[4768]: I1203 16:41:00.537907 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerDied","Data":"8909e7290b5bbf00b005fb4a0d305f83524e34426609b4d5b4963f38da5f62bc"} Dec 03 16:41:00 crc kubenswrapper[4768]: I1203 16:41:00.538946 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:41:02 crc kubenswrapper[4768]: I1203 16:41:02.562027 4768 generic.go:334] "Generic (PLEG): container finished" podID="e12efeef-87cb-48ae-8626-d6e02ca50b40" containerID="1df1242371a73fb6c9edfea3ec7a2033e6c41cf783b6a3ac2603d23ebc68a712" exitCode=0 Dec 03 16:41:02 crc kubenswrapper[4768]: I1203 16:41:02.562062 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7g2qs" event={"ID":"e12efeef-87cb-48ae-8626-d6e02ca50b40","Type":"ContainerDied","Data":"1df1242371a73fb6c9edfea3ec7a2033e6c41cf783b6a3ac2603d23ebc68a712"} Dec 03 16:41:08 crc kubenswrapper[4768]: I1203 16:41:08.558783 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:41:08 crc kubenswrapper[4768]: I1203 16:41:08.624130 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"] Dec 03 16:41:08 crc kubenswrapper[4768]: I1203 16:41:08.624346 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns" containerID="cri-o://026d13935daab3d77076152d64aebf5aa64572576bea932daa7051413bc03976" gracePeriod=10 Dec 03 16:41:09 crc kubenswrapper[4768]: E1203 16:41:09.260357 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 03 16:41:09 crc kubenswrapper[4768]: E1203 16:41:09.260500 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6kxt2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-5vkb8_openstack(5ce5b640-44a6-4924-9f9c-d39b9247c4b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:41:09 crc kubenswrapper[4768]: E1203 16:41:09.261800 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-5vkb8" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" Dec 03 16:41:09 crc kubenswrapper[4768]: I1203 16:41:09.657995 4768 generic.go:334] "Generic (PLEG): container finished" podID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerID="026d13935daab3d77076152d64aebf5aa64572576bea932daa7051413bc03976" exitCode=0 Dec 03 16:41:09 crc kubenswrapper[4768]: I1203 16:41:09.658086 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" event={"ID":"c84dbd36-bce8-4dec-a0f7-56ad2b103209","Type":"ContainerDied","Data":"026d13935daab3d77076152d64aebf5aa64572576bea932daa7051413bc03976"} Dec 03 16:41:09 crc kubenswrapper[4768]: E1203 16:41:09.659927 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-5vkb8" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" Dec 03 16:41:10 crc kubenswrapper[4768]: I1203 16:41:10.672081 4768 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerStarted","Data":"ac1e9b33ae01b4c31085ab5b9ae6bbc4c6709ce3f552a536c3438ba406414009"} Dec 03 16:41:10 crc kubenswrapper[4768]: I1203 16:41:10.672473 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-log" containerID="cri-o://e2fd086ef5259ce39f9ce0430404214d1ef5d9135d175690ba6929f0ed480dbe" gracePeriod=30 Dec 03 16:41:10 crc kubenswrapper[4768]: I1203 16:41:10.673221 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-httpd" containerID="cri-o://ac1e9b33ae01b4c31085ab5b9ae6bbc4c6709ce3f552a536c3438ba406414009" gracePeriod=30 Dec 03 16:41:10 crc kubenswrapper[4768]: I1203 16:41:10.717905 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=18.717883268 podStartE2EDuration="18.717883268s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:10.694411978 +0000 UTC m=+1367.613748441" watchObservedRunningTime="2025-12-03 16:41:10.717883268 +0000 UTC m=+1367.637219711" Dec 03 16:41:11 crc kubenswrapper[4768]: I1203 16:41:11.142941 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: connect: connection refused" Dec 03 16:41:11 crc kubenswrapper[4768]: I1203 16:41:11.682865 4768 generic.go:334] "Generic (PLEG): container finished" podID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerID="ac1e9b33ae01b4c31085ab5b9ae6bbc4c6709ce3f552a536c3438ba406414009" exitCode=0 Dec 03 16:41:11 crc kubenswrapper[4768]: I1203 16:41:11.683138 4768 generic.go:334] "Generic (PLEG): container finished" podID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerID="e2fd086ef5259ce39f9ce0430404214d1ef5d9135d175690ba6929f0ed480dbe" exitCode=143 Dec 03 16:41:11 crc kubenswrapper[4768]: I1203 16:41:11.683030 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerDied","Data":"ac1e9b33ae01b4c31085ab5b9ae6bbc4c6709ce3f552a536c3438ba406414009"} Dec 03 16:41:11 crc kubenswrapper[4768]: I1203 16:41:11.683175 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerDied","Data":"e2fd086ef5259ce39f9ce0430404214d1ef5d9135d175690ba6929f0ed480dbe"} Dec 03 16:41:12 crc kubenswrapper[4768]: E1203 16:41:12.459145 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 03 16:41:12 crc kubenswrapper[4768]: E1203 16:41:12.459396 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k72v8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-t54sm_openstack(5902e376-547e-485a-a963-0c3bc5c5cfe7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:41:12 crc kubenswrapper[4768]: E1203 16:41:12.461325 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-t54sm" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" Dec 03 16:41:12 crc kubenswrapper[4768]: E1203 16:41:12.710055 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-t54sm" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" Dec 03 16:41:16 crc kubenswrapper[4768]: I1203 16:41:16.748423 4768 generic.go:334] "Generic (PLEG): container finished" podID="f3088df5-8818-432a-997e-d6b6b2d7daca" containerID="4e855590d6d14758bee6f9fe4309487b31c6ec27c7271add7ea5f20efbb3de7e" exitCode=0 Dec 03 16:41:16 crc kubenswrapper[4768]: I1203 16:41:16.748945 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fxl6" event={"ID":"f3088df5-8818-432a-997e-d6b6b2d7daca","Type":"ContainerDied","Data":"4e855590d6d14758bee6f9fe4309487b31c6ec27c7271add7ea5f20efbb3de7e"} Dec 03 16:41:21 crc kubenswrapper[4768]: I1203 16:41:21.142583 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: i/o timeout" Dec 03 16:41:21 crc kubenswrapper[4768]: I1203 16:41:21.533357 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:41:23 crc kubenswrapper[4768]: I1203 16:41:23.575529 4768 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:41:23 crc kubenswrapper[4768]: I1203 16:41:23.575961 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:41:24 crc kubenswrapper[4768]: I1203 16:41:24.474830 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:24 crc kubenswrapper[4768]: I1203 16:41:24.474869 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:26 crc kubenswrapper[4768]: I1203 16:41:26.143930 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: i/o timeout" Dec 03 16:41:26 crc kubenswrapper[4768]: I1203 16:41:26.144420 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:41:28 crc kubenswrapper[4768]: E1203 16:41:28.052757 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 03 16:41:28 crc kubenswrapper[4768]: E1203 16:41:28.053218 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-65sdw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,
AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-mftv7_openstack(8f19bd8f-a9f2-41de-b0f3-de08db42cf69): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:41:28 crc kubenswrapper[4768]: E1203 16:41:28.054394 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-mftv7" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.534451 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7g2qs" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.594309 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.602995 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.606509 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fxl6" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654266 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654313 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2j6g\" (UniqueName: \"kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654446 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654465 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwg8w\" (UniqueName: \"kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654490 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654518 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654638 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.654656 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.655119 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs" (OuterVolumeSpecName: "logs") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.655304 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.655686 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.655717 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656124 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b6jj\" (UniqueName: \"kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj\") pod \"f3088df5-8818-432a-997e-d6b6b2d7daca\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656150 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656173 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") " Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 
16:41:28.656198 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data\") pod \"e12efeef-87cb-48ae-8626-d6e02ca50b40\" (UID: \"e12efeef-87cb-48ae-8626-d6e02ca50b40\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656257 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656318 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656337 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config\") pod \"f3088df5-8818-432a-997e-d6b6b2d7daca\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656363 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle\") pod \"f3088df5-8818-432a-997e-d6b6b2d7daca\" (UID: \"f3088df5-8818-432a-997e-d6b6b2d7daca\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656383 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656397 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656417 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data\") pod \"509dcf45-89a1-4338-af4a-b024d0485487\" (UID: \"509dcf45-89a1-4338-af4a-b024d0485487\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656491 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxrzx\" (UniqueName: \"kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx\") pod \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\" (UID: \"c84dbd36-bce8-4dec-a0f7-56ad2b103209\") "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656961 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-logs\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.656980 4768 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/509dcf45-89a1-4338-af4a-b024d0485487-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.669721 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.673276 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts" (OuterVolumeSpecName: "scripts") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.678189 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj" (OuterVolumeSpecName: "kube-api-access-7b6jj") pod "f3088df5-8818-432a-997e-d6b6b2d7daca" (UID: "f3088df5-8818-432a-997e-d6b6b2d7daca"). InnerVolumeSpecName "kube-api-access-7b6jj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.684478 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.684794 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099" (OuterVolumeSpecName: "glance") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.686338 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.686566 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g" (OuterVolumeSpecName: "kube-api-access-l2j6g") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "kube-api-access-l2j6g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.687159 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w" (OuterVolumeSpecName: "kube-api-access-wwg8w") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "kube-api-access-wwg8w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.688315 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx" (OuterVolumeSpecName: "kube-api-access-nxrzx") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "kube-api-access-nxrzx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.698646 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3088df5-8818-432a-997e-d6b6b2d7daca" (UID: "f3088df5-8818-432a-997e-d6b6b2d7daca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.698752 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts" (OuterVolumeSpecName: "scripts") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.736214 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.738624 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config" (OuterVolumeSpecName: "config") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.748265 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data" (OuterVolumeSpecName: "config-data") pod "e12efeef-87cb-48ae-8626-d6e02ca50b40" (UID: "e12efeef-87cb-48ae-8626-d6e02ca50b40"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758823 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758851 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758860 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758868 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxrzx\" (UniqueName: \"kubernetes.io/projected/c84dbd36-bce8-4dec-a0f7-56ad2b103209-kube-api-access-nxrzx\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758878 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758887 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2j6g\" (UniqueName: \"kubernetes.io/projected/e12efeef-87cb-48ae-8626-d6e02ca50b40-kube-api-access-l2j6g\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758915 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") on node \"crc\" "
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758927 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwg8w\" (UniqueName: \"kubernetes.io/projected/509dcf45-89a1-4338-af4a-b024d0485487-kube-api-access-wwg8w\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758936 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758944 4768 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758952 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758960 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b6jj\" (UniqueName: \"kubernetes.io/projected/f3088df5-8818-432a-997e-d6b6b2d7daca-kube-api-access-7b6jj\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758969 4768 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.758978 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e12efeef-87cb-48ae-8626-d6e02ca50b40-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.763313 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.772776 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config" (OuterVolumeSpecName: "config") pod "f3088df5-8818-432a-997e-d6b6b2d7daca" (UID: "f3088df5-8818-432a-997e-d6b6b2d7daca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.779744 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.780462 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.782978 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c84dbd36-bce8-4dec-a0f7-56ad2b103209" (UID: "c84dbd36-bce8-4dec-a0f7-56ad2b103209"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.783026 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.783153 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099") on node "crc"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.792042 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data" (OuterVolumeSpecName: "config-data") pod "509dcf45-89a1-4338-af4a-b024d0485487" (UID: "509dcf45-89a1-4338-af4a-b024d0485487"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863196 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3088df5-8818-432a-997e-d6b6b2d7daca-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863231 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/509dcf45-89a1-4338-af4a-b024d0485487-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863241 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863254 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863262 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863270 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.863278 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c84dbd36-bce8-4dec-a0f7-56ad2b103209-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.937664 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7g2qs" event={"ID":"e12efeef-87cb-48ae-8626-d6e02ca50b40","Type":"ContainerDied","Data":"5b317409ad0c858c0c5025d5b90e0a6f3d5742e1a876a318ebce198c5bd2f483"}
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.937728 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b317409ad0c858c0c5025d5b90e0a6f3d5742e1a876a318ebce198c5bd2f483"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.937804 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7g2qs"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.940697 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-6fxl6" event={"ID":"f3088df5-8818-432a-997e-d6b6b2d7daca","Type":"ContainerDied","Data":"8aba1c57c7566ec70b6000e3dc47774524063a76207da652be46513ea2871c8c"}
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.940741 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8aba1c57c7566ec70b6000e3dc47774524063a76207da652be46513ea2871c8c"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.940837 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-6fxl6"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.978343 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.978529 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"509dcf45-89a1-4338-af4a-b024d0485487","Type":"ContainerDied","Data":"0dec4c1a164fb3ec14a53a84a68d6ed1e9a808779158323752ed4bd0b48eab34"}
Dec 03 16:41:28 crc kubenswrapper[4768]: I1203 16:41:28.978562 4768 scope.go:117] "RemoveContainer" containerID="f50b461751b2e922876d30cc54029ff73346bacbc729b32d498800127a80ab80"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.005141 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-764c5664d7-mfh94"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.005134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" event={"ID":"c84dbd36-bce8-4dec-a0f7-56ad2b103209","Type":"ContainerDied","Data":"ded71fdc2218f62e0e90ec45daf8b460aaa5aa1a3e3df71db2111b0996bc0fd9"}
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.007187 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-mftv7" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.074091 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.093102 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.107716 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.123253 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-764c5664d7-mfh94"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.124900 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125266 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-log"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125277 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-log"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125297 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3088df5-8818-432a-997e-d6b6b2d7daca" containerName="neutron-db-sync"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125307 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3088df5-8818-432a-997e-d6b6b2d7daca" containerName="neutron-db-sync"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125331 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-httpd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125337 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-httpd"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125354 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125360 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125369 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e12efeef-87cb-48ae-8626-d6e02ca50b40" containerName="keystone-bootstrap"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125375 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e12efeef-87cb-48ae-8626-d6e02ca50b40" containerName="keystone-bootstrap"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125384 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125426 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125437 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125443 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns"
Dec 03 16:41:29 crc kubenswrapper[4768]: E1203 16:41:29.125453 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93351c15-e5c9-46b2-8b6c-e3faa003870b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125459 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="93351c15-e5c9-46b2-8b6c-e3faa003870b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125743 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-httpd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125792 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f937f4ac-d4e2-4368-b0f3-ab36f8c0f39b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125804 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="509dcf45-89a1-4338-af4a-b024d0485487" containerName="glance-log"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125819 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3088df5-8818-432a-997e-d6b6b2d7daca" containerName="neutron-db-sync"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125837 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="93351c15-e5c9-46b2-8b6c-e3faa003870b" containerName="init"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125873 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e12efeef-87cb-48ae-8626-d6e02ca50b40" containerName="keystone-bootstrap"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.125894 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.127348 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.131126 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.131366 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.156954 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182017 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182212 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182312 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9kfd\" (UniqueName: \"kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182405 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182481 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182562 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182657 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.182773 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284417 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284456 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284481 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9kfd\" (UniqueName: \"kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284522 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284547 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284570 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284614 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.284651 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.285316 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.285401 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.289405 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.289461 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.289485 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.290477 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.290505 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2d20f1f07bf733bd5c44955ae3cad3d4468693e76c899493d03847aaee02910e/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.292136 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.302094 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9kfd\" (UniqueName: \"kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.328034 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.447759 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.542355 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="509dcf45-89a1-4338-af4a-b024d0485487" path="/var/lib/kubelet/pods/509dcf45-89a1-4338-af4a-b024d0485487/volumes"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.543165 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" path="/var/lib/kubelet/pods/c84dbd36-bce8-4dec-a0f7-56ad2b103209/volumes"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.632377 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7g2qs"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.641161 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7g2qs"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.734692 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xchkb"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.736161 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.738809 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.739169 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-srlwn"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.739394 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.740138 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.747155 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xchkb"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792588 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792685 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpv2c\" (UniqueName: \"kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792721 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792807 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792858 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.792888 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.894730 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.894964 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.895081 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpv2c\" (UniqueName: \"kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.895173 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.895279 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.895392 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.904756 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.906761 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.919952 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.921888 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.924359 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.930899 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.932983 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.938798 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"]
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.939192 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpv2c\" (UniqueName: \"kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c\") pod \"keystone-bootstrap-xchkb\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.998848 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.998898 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.998916 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.999102 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.999193 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9xbg\" (UniqueName: \"kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:29 crc kubenswrapper[4768]: I1203 16:41:29.999242 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.054863 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-859c56f644-z9j2l"]
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.056457 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.060816 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.061084 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-tgrnc"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.061299 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.061497 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.065344 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-859c56f644-z9j2l"]
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.093499 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xchkb"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100469 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100552 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100568 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100614 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100656 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100688 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100720 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9xbg\" (UniqueName: \"kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100741 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ztk7\" (UniqueName: \"kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100766 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.100808 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.101319 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.101440 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.101962 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.102251 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.102426 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.119031 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9xbg\" (UniqueName: \"kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg\") pod \"dnsmasq-dns-55f844cf75-9wmjd\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.202187 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.202266 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ztk7\" (UniqueName: \"kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.202318 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.202371 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.202405 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.207428 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.208885 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.209276 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.220049 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.221953 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ztk7\" (UniqueName: \"kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7\") pod \"neutron-859c56f644-z9j2l\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.343353 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:41:30 crc kubenswrapper[4768]: I1203 16:41:30.381575 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:31 crc kubenswrapper[4768]: I1203 16:41:31.144801 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-764c5664d7-mfh94" podUID="c84dbd36-bce8-4dec-a0f7-56ad2b103209" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: i/o timeout"
Dec 03 16:41:31 crc kubenswrapper[4768]: I1203 16:41:31.545020 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e12efeef-87cb-48ae-8626-d6e02ca50b40" path="/var/lib/kubelet/pods/e12efeef-87cb-48ae-8626-d6e02ca50b40/volumes"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.134746 4768 scope.go:117] "RemoveContainer" containerID="8909e7290b5bbf00b005fb4a0d305f83524e34426609b4d5b4963f38da5f62bc"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.325100 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.371403 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-644d64cc89-l6cqk"]
Dec 03 16:41:32 crc kubenswrapper[4768]: E1203 16:41:32.371818 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-httpd"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.371836 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-httpd"
Dec 03 16:41:32 crc kubenswrapper[4768]: E1203 16:41:32.371857 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-log"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.371862 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-log"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.372070 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-log"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.372093 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" containerName="glance-httpd"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.373118 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.385579 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.385879 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.417506 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-644d64cc89-l6cqk"]
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452332 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452415 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452650 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452711 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452783 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452801 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.452830 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwjtg\" (UniqueName: \"kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg\") pod \"67124cc3-c453-4e95-b380-8d94f7ca4d34\" (UID: \"67124cc3-c453-4e95-b380-8d94f7ca4d34\") "
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453027 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs" (OuterVolumeSpecName: "logs") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453169 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-public-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453256 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-httpd-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453324 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-internal-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453377 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txzrc\" (UniqueName: \"kubernetes.io/projected/260f7230-73a0-4bec-b9c4-2805af398ab1-kube-api-access-txzrc\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453475 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453519 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-ovndb-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453560 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-combined-ca-bundle\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk"
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.453637 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-logs\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.454212 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "httpd-run".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.476973 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts" (OuterVolumeSpecName: "scripts") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.489858 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg" (OuterVolumeSpecName: "kube-api-access-lwjtg") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "kube-api-access-lwjtg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.499620 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.515295 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a" (OuterVolumeSpecName: "glance") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "pvc-45057964-d335-410f-a814-6d1d79c3091a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.534672 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data" (OuterVolumeSpecName: "config-data") pod "67124cc3-c453-4e95-b380-8d94f7ca4d34" (UID: "67124cc3-c453-4e95-b380-8d94f7ca4d34"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556024 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-internal-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556081 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txzrc\" (UniqueName: \"kubernetes.io/projected/260f7230-73a0-4bec-b9c4-2805af398ab1-kube-api-access-txzrc\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556142 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556166 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-ovndb-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556201 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-combined-ca-bundle\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556223 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-public-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556271 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-httpd-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556348 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") on node \"crc\" " Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556361 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556371 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 
16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556380 4768 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/67124cc3-c453-4e95-b380-8d94f7ca4d34-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556388 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwjtg\" (UniqueName: \"kubernetes.io/projected/67124cc3-c453-4e95-b380-8d94f7ca4d34-kube-api-access-lwjtg\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.556399 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67124cc3-c453-4e95-b380-8d94f7ca4d34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.560537 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-internal-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.561379 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.562451 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-combined-ca-bundle\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.565700 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-ovndb-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.565890 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-httpd-config\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.566346 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/260f7230-73a0-4bec-b9c4-2805af398ab1-public-tls-certs\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.576373 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txzrc\" (UniqueName: \"kubernetes.io/projected/260f7230-73a0-4bec-b9c4-2805af398ab1-kube-api-access-txzrc\") pod \"neutron-644d64cc89-l6cqk\" (UID: \"260f7230-73a0-4bec-b9c4-2805af398ab1\") " pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.590891 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice 
STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.591015 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-45057964-d335-410f-a814-6d1d79c3091a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a") on node "crc" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.657968 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:32 crc kubenswrapper[4768]: I1203 16:41:32.717244 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.075658 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"67124cc3-c453-4e95-b380-8d94f7ca4d34","Type":"ContainerDied","Data":"27080a913e1dfd89cf90da3156ea038115b7fc898be09cbd64e5b2ee62e774ff"} Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.075741 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.115850 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.137333 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.150872 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.152619 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.157147 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.157301 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.161712 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269253 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269552 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269590 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbc7r\" (UniqueName: \"kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269650 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269878 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.269933 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.270060 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.270104 4768 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372172 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372229 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372290 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372327 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372361 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372411 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-logs\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372451 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbc7r\" (UniqueName: \"kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.372505 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.373467 4768 
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.373814 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.374679 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.374725 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/47b06e618a5b6a9be0e4e66414c4e7ce54d0762cbbe4888f533ae97371202fb7/globalmount\"" pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.377577 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.393038 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.393538 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.394895 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.395963 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbc7r\" (UniqueName: \"kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.417645 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.508374 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 03 16:41:33 crc kubenswrapper[4768]: I1203 16:41:33.558076 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67124cc3-c453-4e95-b380-8d94f7ca4d34" path="/var/lib/kubelet/pods/67124cc3-c453-4e95-b380-8d94f7ca4d34/volumes"
Dec 03 16:41:36 crc kubenswrapper[4768]: E1203 16:41:36.030201 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Dec 03 16:41:36 crc kubenswrapper[4768]: E1203 16:41:36.030988 4768 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current"
Dec 03 16:41:36 crc kubenswrapper[4768]: E1203 16:41:36.031159 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xg442,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-pxm2p_openstack(449f6efb-981d-445a-b10a-a8d76f9d027d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 03 16:41:36 crc kubenswrapper[4768]: E1203 16:41:36.034292 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-pxm2p" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d"
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.049029 4768 scope.go:117] "RemoveContainer" containerID="026d13935daab3d77076152d64aebf5aa64572576bea932daa7051413bc03976"
Dec 03 16:41:36 crc kubenswrapper[4768]: E1203 16:41:36.130853 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-pxm2p" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d"
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.285758 4768 scope.go:117] "RemoveContainer" containerID="6a57611ccdd2cc8fd6bb95b3690b756401eb27be13412a3950e12345c202d39c"
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.536046 4768 scope.go:117] "RemoveContainer" containerID="ac1e9b33ae01b4c31085ab5b9ae6bbc4c6709ce3f552a536c3438ba406414009"
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.691111 4768 scope.go:117] "RemoveContainer" containerID="e2fd086ef5259ce39f9ce0430404214d1ef5d9135d175690ba6929f0ed480dbe"
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.803650 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.871138 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xchkb"]
Dec 03 16:41:36 crc kubenswrapper[4768]: I1203 16:41:36.992146 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-859c56f644-z9j2l"]
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.019196 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"]
Dec 03 16:41:37 crc kubenswrapper[4768]: W1203 16:41:37.023916 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod126fe5e3_ea62_402d_96db_35bcff0436aa.slice/crio-68fcd5adc0e9453f1bc99d292b63e34d07d008cc04904986f44fc0d1a7593414 WatchSource:0}: Error finding container 68fcd5adc0e9453f1bc99d292b63e34d07d008cc04904986f44fc0d1a7593414: Status 404 returned error can't find the container with id 68fcd5adc0e9453f1bc99d292b63e34d07d008cc04904986f44fc0d1a7593414
Dec 03 16:41:37 crc kubenswrapper[4768]: W1203 16:41:37.024682 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d3dff09_585e_4369_981c_e49cfda14fe5.slice/crio-8c5d744c1e1719e7a44001a939ec2025e1078c51255130012a36e2408ad10cd8 WatchSource:0}: Error finding container 8c5d744c1e1719e7a44001a939ec2025e1078c51255130012a36e2408ad10cd8: Status 404 returned error can't find the container with id 8c5d744c1e1719e7a44001a939ec2025e1078c51255130012a36e2408ad10cd8
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.128657 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-644d64cc89-l6cqk"]
Dec 03 16:41:37 crc kubenswrapper[4768]: W1203 16:41:37.137103 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod260f7230_73a0_4bec_b9c4_2805af398ab1.slice/crio-4f124e829ae319ffc73d7f5152631f126f8a9c59305738d208501b69379ba831 WatchSource:0}: Error finding container 4f124e829ae319ffc73d7f5152631f126f8a9c59305738d208501b69379ba831: Status 404 returned error can't find the container with id 4f124e829ae319ffc73d7f5152631f126f8a9c59305738d208501b69379ba831
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.146966 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5vkb8" event={"ID":"5ce5b640-44a6-4924-9f9c-d39b9247c4b3","Type":"ContainerStarted","Data":"03dc37845418a82e1434111c1a55ca93ba22aeb38ca3d093d67b7a40a0de4628"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.150512 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" event={"ID":"3d3dff09-585e-4369-981c-e49cfda14fe5","Type":"ContainerStarted","Data":"8c5d744c1e1719e7a44001a939ec2025e1078c51255130012a36e2408ad10cd8"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.156014 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerStarted","Data":"e73804da5c87bd1f56f1bb7dd3b95c32d0a872a437599231f02ea19e19c73ff4"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.157439 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t54sm" event={"ID":"5902e376-547e-485a-a963-0c3bc5c5cfe7","Type":"ContainerStarted","Data":"ab7b3d416f73b914ad908bba6975d6a8eba78deea5b704ec42ef64231a9df727"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.164630 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xchkb" event={"ID":"8b9cb523-2eda-41a4-97de-745d2b5eb5b4","Type":"ContainerStarted","Data":"8b60833b2aa30b920aafa424656808df04f5d2708a243131266aafccfdcd4c16"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.164681 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xchkb" event={"ID":"8b9cb523-2eda-41a4-97de-745d2b5eb5b4","Type":"ContainerStarted","Data":"8f9bf0ffec4403f0a772a8e3100886db5e4c55f7325a4340c0708b92611b5e2c"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.169315 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-5vkb8" podStartSLOduration=3.260040734 podStartE2EDuration="45.169296338s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="2025-12-03 16:40:54.156694809 +0000 UTC m=+1351.076031232" lastFinishedPulling="2025-12-03 16:41:36.065950413 +0000 UTC m=+1392.985286836" observedRunningTime="2025-12-03 16:41:37.162520481 +0000 UTC m=+1394.081856904" watchObservedRunningTime="2025-12-03 16:41:37.169296338 +0000 UTC m=+1394.088632761"
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.172744 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerStarted","Data":"68fcd5adc0e9453f1bc99d292b63e34d07d008cc04904986f44fc0d1a7593414"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.182443 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerStarted","Data":"b52ff473556cf796fedc2d917d1f301a207e69c4a1ceaad27ccae0b1252d53d1"}
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.187318 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-t54sm" podStartSLOduration=3.386192005 podStartE2EDuration="45.18729518s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="2025-12-03 16:40:54.269769655 +0000 UTC m=+1351.189106068" lastFinishedPulling="2025-12-03 16:41:36.0708728 +0000 UTC m=+1392.990209243" observedRunningTime="2025-12-03 16:41:37.17857076 +0000 UTC m=+1394.097907183" watchObservedRunningTime="2025-12-03 16:41:37.18729518 +0000 UTC m=+1394.106631603"
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.219897 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xchkb" podStartSLOduration=8.219871008 podStartE2EDuration="8.219871008s" podCreationTimestamp="2025-12-03 16:41:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:37.199110547 +0000 UTC m=+1394.118446980" watchObservedRunningTime="2025-12-03 16:41:37.219871008 +0000 UTC m=+1394.139207431"
Dec 03 16:41:37 crc kubenswrapper[4768]: I1203 16:41:37.270101 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.241199 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerStarted","Data":"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d"}
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.241702 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerStarted","Data":"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b"}
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.242023 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.244386 4768 generic.go:334] "Generic (PLEG): container finished" podID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerID="7d9e66512bdfc1a6faa86c9291a8193be0957146d146a32af80ff480955b6e41" exitCode=0
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.244470 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" event={"ID":"3d3dff09-585e-4369-981c-e49cfda14fe5","Type":"ContainerDied","Data":"7d9e66512bdfc1a6faa86c9291a8193be0957146d146a32af80ff480955b6e41"}
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.253091 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerStarted","Data":"56fe90609af053750cebb497d51d46888c5dc58d2760b4ac9cc2d1e0156f85e3"}
Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.254741 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-644d64cc89-l6cqk" event={"ID":"260f7230-73a0-4bec-b9c4-2805af398ab1","Type":"ContainerStarted","Data":"a9236ccb6582838b122dfddc6b16f8b590558185226e68af261ff56619325e8e"}
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-644d64cc89-l6cqk" event={"ID":"260f7230-73a0-4bec-b9c4-2805af398ab1","Type":"ContainerStarted","Data":"a9236ccb6582838b122dfddc6b16f8b590558185226e68af261ff56619325e8e"} Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.254783 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-644d64cc89-l6cqk" event={"ID":"260f7230-73a0-4bec-b9c4-2805af398ab1","Type":"ContainerStarted","Data":"e7096a5bc5e39398423c88f69244cd70f1618b8898613b7a06f57923eed512bb"} Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.254794 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-644d64cc89-l6cqk" event={"ID":"260f7230-73a0-4bec-b9c4-2805af398ab1","Type":"ContainerStarted","Data":"4f124e829ae319ffc73d7f5152631f126f8a9c59305738d208501b69379ba831"} Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.255462 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.258019 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerStarted","Data":"0a91d13771fdc789efbacc4260fe138fd9fa7056af604a8a93b8afceaf9549b7"} Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.258042 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerStarted","Data":"f80c2f78555f430bcfd9951c690834ef488b98313c7833726915929e1838d440"} Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.264193 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-859c56f644-z9j2l" podStartSLOduration=8.26418162 podStartE2EDuration="8.26418162s" podCreationTimestamp="2025-12-03 16:41:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:38.261322077 +0000 UTC m=+1395.180658500" watchObservedRunningTime="2025-12-03 16:41:38.26418162 +0000 UTC m=+1395.183518043" Dec 03 16:41:38 crc kubenswrapper[4768]: I1203 16:41:38.333552 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-644d64cc89-l6cqk" podStartSLOduration=6.333527518 podStartE2EDuration="6.333527518s" podCreationTimestamp="2025-12-03 16:41:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:38.321246811 +0000 UTC m=+1395.240583234" watchObservedRunningTime="2025-12-03 16:41:38.333527518 +0000 UTC m=+1395.252863951" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.270205 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerStarted","Data":"da654315bf738832b22d3d42e152e61599c50831fe17e9585f977103544abf52"} Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.272685 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" event={"ID":"3d3dff09-585e-4369-981c-e49cfda14fe5","Type":"ContainerStarted","Data":"f7526aaa43ecd4503e4453ec90c875bad92c4f2e69a6e6a083cf66f844ef0746"} Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.272789 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.274232 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerStarted","Data":"6897e6021f9255f3f9879dacaa346be1f440258d4c1c009e12601d680bb41605"} Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.276221 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerStarted","Data":"22013b88b7c24ff003eef4059d7e03c3cb1f824b3664b420d051ff29243df067"} Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.306111 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.306088249 podStartE2EDuration="6.306088249s" podCreationTimestamp="2025-12-03 16:41:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:39.301746984 +0000 UTC m=+1396.221083407" watchObservedRunningTime="2025-12-03 16:41:39.306088249 +0000 UTC m=+1396.225424682" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.347294 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" podStartSLOduration=10.347254244 podStartE2EDuration="10.347254244s" podCreationTimestamp="2025-12-03 16:41:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:39.325542752 +0000 UTC m=+1396.244879185" watchObservedRunningTime="2025-12-03 16:41:39.347254244 +0000 UTC m=+1396.266590667" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.373669 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.373638998 podStartE2EDuration="10.373638998s" podCreationTimestamp="2025-12-03 16:41:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:39.360401661 +0000 UTC m=+1396.279738094" watchObservedRunningTime="2025-12-03 16:41:39.373638998 +0000 UTC m=+1396.292975421" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.448320 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.448473 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.480164 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:41:39 crc kubenswrapper[4768]: I1203 16:41:39.497375 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:41:40 crc kubenswrapper[4768]: I1203 16:41:40.296697 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:41:40 crc kubenswrapper[4768]: I1203 16:41:40.297298 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.305892 4768 
generic.go:334] "Generic (PLEG): container finished" podID="5902e376-547e-485a-a963-0c3bc5c5cfe7" containerID="ab7b3d416f73b914ad908bba6975d6a8eba78deea5b704ec42ef64231a9df727" exitCode=0 Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.305931 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t54sm" event={"ID":"5902e376-547e-485a-a963-0c3bc5c5cfe7","Type":"ContainerDied","Data":"ab7b3d416f73b914ad908bba6975d6a8eba78deea5b704ec42ef64231a9df727"} Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.308532 4768 generic.go:334] "Generic (PLEG): container finished" podID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" containerID="03dc37845418a82e1434111c1a55ca93ba22aeb38ca3d093d67b7a40a0de4628" exitCode=0 Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.308571 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5vkb8" event={"ID":"5ce5b640-44a6-4924-9f9c-d39b9247c4b3","Type":"ContainerDied","Data":"03dc37845418a82e1434111c1a55ca93ba22aeb38ca3d093d67b7a40a0de4628"} Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.310502 4768 generic.go:334] "Generic (PLEG): container finished" podID="8b9cb523-2eda-41a4-97de-745d2b5eb5b4" containerID="8b60833b2aa30b920aafa424656808df04f5d2708a243131266aafccfdcd4c16" exitCode=0 Dec 03 16:41:41 crc kubenswrapper[4768]: I1203 16:41:41.310578 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xchkb" event={"ID":"8b9cb523-2eda-41a4-97de-745d2b5eb5b4","Type":"ContainerDied","Data":"8b60833b2aa30b920aafa424656808df04f5d2708a243131266aafccfdcd4c16"} Dec 03 16:41:43 crc kubenswrapper[4768]: I1203 16:41:43.508732 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:43 crc kubenswrapper[4768]: I1203 16:41:43.509102 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:43 crc kubenswrapper[4768]: I1203 16:41:43.568762 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:43 crc kubenswrapper[4768]: I1203 16:41:43.596895 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:44 crc kubenswrapper[4768]: I1203 16:41:44.343463 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:44 crc kubenswrapper[4768]: I1203 16:41:44.343528 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:45 crc kubenswrapper[4768]: I1203 16:41:45.344726 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" Dec 03 16:41:45 crc kubenswrapper[4768]: I1203 16:41:45.418211 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"] Dec 03 16:41:45 crc kubenswrapper[4768]: I1203 16:41:45.418566 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="dnsmasq-dns" containerID="cri-o://d908d3735361129693c4b06e06e76e6073ba8fe017a790d36bec3d6a562684b8" gracePeriod=10 Dec 03 16:41:46 crc kubenswrapper[4768]: I1203 16:41:46.384728 4768 generic.go:334] "Generic (PLEG): container finished" 
podID="e84712b7-2b2e-47b2-8be3-024675467757" containerID="d908d3735361129693c4b06e06e76e6073ba8fe017a790d36bec3d6a562684b8" exitCode=0 Dec 03 16:41:46 crc kubenswrapper[4768]: I1203 16:41:46.384806 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" event={"ID":"e84712b7-2b2e-47b2-8be3-024675467757","Type":"ContainerDied","Data":"d908d3735361129693c4b06e06e76e6073ba8fe017a790d36bec3d6a562684b8"} Dec 03 16:41:46 crc kubenswrapper[4768]: I1203 16:41:46.459628 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:41:47 crc kubenswrapper[4768]: I1203 16:41:47.365834 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:47 crc kubenswrapper[4768]: I1203 16:41:47.366955 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:41:47 crc kubenswrapper[4768]: I1203 16:41:47.368487 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.137589 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xchkb" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.150857 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-5vkb8" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.160876 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t54sm" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202565 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle\") pod \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202613 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202630 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202684 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle\") pod \"5902e376-547e-485a-a963-0c3bc5c5cfe7\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202810 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202832 4768 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-k72v8\" (UniqueName: \"kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8\") pod \"5902e376-547e-485a-a963-0c3bc5c5cfe7\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202890 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs\") pod \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202925 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202947 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts\") pod \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.202974 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpv2c\" (UniqueName: \"kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.203010 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts\") pod \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\" (UID: \"8b9cb523-2eda-41a4-97de-745d2b5eb5b4\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.203038 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data\") pod \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.203052 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data\") pod \"5902e376-547e-485a-a963-0c3bc5c5cfe7\" (UID: \"5902e376-547e-485a-a963-0c3bc5c5cfe7\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.203070 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6kxt2\" (UniqueName: \"kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2\") pod \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\" (UID: \"5ce5b640-44a6-4924-9f9c-d39b9247c4b3\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.208970 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs" (OuterVolumeSpecName: "logs") pod "5ce5b640-44a6-4924-9f9c-d39b9247c4b3" (UID: "5ce5b640-44a6-4924-9f9c-d39b9247c4b3"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.278098 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8" (OuterVolumeSpecName: "kube-api-access-k72v8") pod "5902e376-547e-485a-a963-0c3bc5c5cfe7" (UID: "5902e376-547e-485a-a963-0c3bc5c5cfe7"). InnerVolumeSpecName "kube-api-access-k72v8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.297859 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts" (OuterVolumeSpecName: "scripts") pod "5ce5b640-44a6-4924-9f9c-d39b9247c4b3" (UID: "5ce5b640-44a6-4924-9f9c-d39b9247c4b3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.307926 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.307957 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.307966 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k72v8\" (UniqueName: \"kubernetes.io/projected/5902e376-547e-485a-a963-0c3bc5c5cfe7-kube-api-access-k72v8\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.318777 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5902e376-547e-485a-a963-0c3bc5c5cfe7" (UID: "5902e376-547e-485a-a963-0c3bc5c5cfe7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.323051 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c" (OuterVolumeSpecName: "kube-api-access-rpv2c") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "kube-api-access-rpv2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.326005 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.326328 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2" (OuterVolumeSpecName: "kube-api-access-6kxt2") pod "5ce5b640-44a6-4924-9f9c-d39b9247c4b3" (UID: "5ce5b640-44a6-4924-9f9c-d39b9247c4b3"). InnerVolumeSpecName "kube-api-access-6kxt2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.330368 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.334143 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts" (OuterVolumeSpecName: "scripts") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.405767 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5902e376-547e-485a-a963-0c3bc5c5cfe7" (UID: "5902e376-547e-485a-a963-0c3bc5c5cfe7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412205 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpv2c\" (UniqueName: \"kubernetes.io/projected/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-kube-api-access-rpv2c\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412284 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412346 4768 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412402 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6kxt2\" (UniqueName: \"kubernetes.io/projected/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-kube-api-access-6kxt2\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412455 4768 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412510 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5902e376-547e-485a-a963-0c3bc5c5cfe7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.412602 4768 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.413991 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ce5b640-44a6-4924-9f9c-d39b9247c4b3" 
(UID: "5ce5b640-44a6-4924-9f9c-d39b9247c4b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.415254 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data" (OuterVolumeSpecName: "config-data") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.420732 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data" (OuterVolumeSpecName: "config-data") pod "5ce5b640-44a6-4924-9f9c-d39b9247c4b3" (UID: "5ce5b640-44a6-4924-9f9c-d39b9247c4b3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.420740 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b9cb523-2eda-41a4-97de-745d2b5eb5b4" (UID: "8b9cb523-2eda-41a4-97de-745d2b5eb5b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.432857 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xchkb" event={"ID":"8b9cb523-2eda-41a4-97de-745d2b5eb5b4","Type":"ContainerDied","Data":"8f9bf0ffec4403f0a772a8e3100886db5e4c55f7325a4340c0708b92611b5e2c"} Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.432896 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f9bf0ffec4403f0a772a8e3100886db5e4c55f7325a4340c0708b92611b5e2c" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.432956 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xchkb" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.445134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-t54sm" event={"ID":"5902e376-547e-485a-a963-0c3bc5c5cfe7","Type":"ContainerDied","Data":"7e04f5e21773dbf9543cd6e6bc7563322ddb8aea304395fdcf7d5b1316de3db6"} Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.445162 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e04f5e21773dbf9543cd6e6bc7563322ddb8aea304395fdcf7d5b1316de3db6" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.445203 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-t54sm" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.456541 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-5vkb8" event={"ID":"5ce5b640-44a6-4924-9f9c-d39b9247c4b3","Type":"ContainerDied","Data":"850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991"} Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.456562 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="850184d327061918315be9696a381013e235d62d6960614a24b932a94fa86991" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.456609 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-5vkb8" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.514951 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.514979 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.514995 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ce5b640-44a6-4924-9f9c-d39b9247c4b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.515006 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9cb523-2eda-41a4-97de-745d2b5eb5b4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.517146 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616029 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9w6l\" (UniqueName: \"kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616083 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616165 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616251 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616369 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.616393 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb\") pod \"e84712b7-2b2e-47b2-8be3-024675467757\" (UID: \"e84712b7-2b2e-47b2-8be3-024675467757\") " Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.633006 4768 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l" (OuterVolumeSpecName: "kube-api-access-h9w6l") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "kube-api-access-h9w6l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.698411 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.700812 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.708318 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.709996 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "dns-swift-storage-0".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.718828 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9w6l\" (UniqueName: \"kubernetes.io/projected/e84712b7-2b2e-47b2-8be3-024675467757-kube-api-access-h9w6l\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.718860 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.718868 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.718877 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.718886 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.729039 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config" (OuterVolumeSpecName: "config") pod "e84712b7-2b2e-47b2-8be3-024675467757" (UID: "e84712b7-2b2e-47b2-8be3-024675467757"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.750256 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:41:48 crc kubenswrapper[4768]: I1203 16:41:48.821028 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e84712b7-2b2e-47b2-8be3-024675467757-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.280735 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7c7c5849fb-krxhd"] Dec 03 16:41:49 crc kubenswrapper[4768]: E1203 16:41:49.281449 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b9cb523-2eda-41a4-97de-745d2b5eb5b4" containerName="keystone-bootstrap" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281466 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b9cb523-2eda-41a4-97de-745d2b5eb5b4" containerName="keystone-bootstrap" Dec 03 16:41:49 crc kubenswrapper[4768]: E1203 16:41:49.281484 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" containerName="placement-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281490 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" containerName="placement-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: E1203 16:41:49.281520 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="init" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281526 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="init" Dec 03 16:41:49 crc kubenswrapper[4768]: E1203 16:41:49.281536 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="dnsmasq-dns" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281542 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="dnsmasq-dns" Dec 03 16:41:49 crc kubenswrapper[4768]: E1203 16:41:49.281555 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" containerName="barbican-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281561 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" containerName="barbican-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281739 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" containerName="placement-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281758 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b9cb523-2eda-41a4-97de-745d2b5eb5b4" containerName="keystone-bootstrap" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281770 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e84712b7-2b2e-47b2-8be3-024675467757" containerName="dnsmasq-dns" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.281779 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" containerName="barbican-db-sync" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.282456 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.284264 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.286162 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.287026 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.287358 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.287520 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-srlwn" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.290631 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.313120 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c7c5849fb-krxhd"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.369682 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-fernet-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370037 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-credential-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370163 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-public-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370266 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-internal-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370367 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-combined-ca-bundle\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370534 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-scripts\") pod \"keystone-7c7c5849fb-krxhd\" (UID: 
\"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370636 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-config-data\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.370880 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l5zh\" (UniqueName: \"kubernetes.io/projected/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-kube-api-access-5l5zh\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.469026 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerStarted","Data":"0ddfde2ac9d36af44236ad8e6aa81cdaf5acf1c349f9c277243187f790161eb3"} Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473339 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l5zh\" (UniqueName: \"kubernetes.io/projected/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-kube-api-access-5l5zh\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473460 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-fernet-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473519 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-credential-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473546 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-public-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473578 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-internal-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473801 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" event={"ID":"e84712b7-2b2e-47b2-8be3-024675467757","Type":"ContainerDied","Data":"d7a8edb2bc7d5199fc4a54a34f0d5249b2ba75697d4a37c37983ab62bb166a7f"} Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.473910 4768 scope.go:117] "RemoveContainer" 
containerID="d908d3735361129693c4b06e06e76e6073ba8fe017a790d36bec3d6a562684b8" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.474123 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-fkggx" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.475457 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-combined-ca-bundle\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.475545 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-scripts\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.475573 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-config-data\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.480827 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-internal-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.480950 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-public-tls-certs\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.481713 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-credential-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.482108 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-scripts\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.483071 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-combined-ca-bundle\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.483213 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-config-data\") pod \"keystone-7c7c5849fb-krxhd\" (UID: 
\"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.483208 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mftv7" event={"ID":"8f19bd8f-a9f2-41de-b0f3-de08db42cf69","Type":"ContainerStarted","Data":"cd40e10cd09bf0e0dbdb8b5a036f119bc0d0a1e6f09e9f377800acb538039240"} Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.492279 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l5zh\" (UniqueName: \"kubernetes.io/projected/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-kube-api-access-5l5zh\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.500016 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5c7cbc9b-e7e7-453c-b045-02d4f0317fff-fernet-keys\") pod \"keystone-7c7c5849fb-krxhd\" (UID: \"5c7cbc9b-e7e7-453c-b045-02d4f0317fff\") " pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.529077 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-mftv7" podStartSLOduration=3.229066899 podStartE2EDuration="57.529051879s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="2025-12-03 16:40:53.642256914 +0000 UTC m=+1350.561593337" lastFinishedPulling="2025-12-03 16:41:47.942241894 +0000 UTC m=+1404.861578317" observedRunningTime="2025-12-03 16:41:49.511298483 +0000 UTC m=+1406.430634926" watchObservedRunningTime="2025-12-03 16:41:49.529051879 +0000 UTC m=+1406.448388302" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.607336 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.634743 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-58db46799c-q4fgd"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.657408 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.668328 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.668517 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4zvwk" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.668786 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.739785 4768 scope.go:117] "RemoveContainer" containerID="2e496b7b6a02d1629e50944245f9d436b45b778806abe0d021a61888e8a998f3" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.759673 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-b57955886-dvqrj"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.761528 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.766441 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.797140 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-58db46799c-q4fgd"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.815803 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-85896597d4-l886p"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.817630 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.820162 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9mjt\" (UniqueName: \"kubernetes.io/projected/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-kube-api-access-t9mjt\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.820418 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-logs\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.820866 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-combined-ca-bundle\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.821000 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.821083 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data-custom\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.827740 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.828156 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.828535 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.828978 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-mpc26" Dec 03 16:41:49 crc 
kubenswrapper[4768]: I1203 16:41:49.829521 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.833695 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-85896597d4-l886p"]
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.852981 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-b57955886-dvqrj"]
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.889552 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"]
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.915970 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-fkggx"]
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935074 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b2a173f-65dd-4b2f-b497-826614a4bc17-logs\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935157 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-combined-ca-bundle\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935274 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-combined-ca-bundle\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935325 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data-custom\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935367 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9973c84b-640a-44cb-b0e0-e8a2d47ba909-logs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p"
Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935399 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-internal-tls-certs\") pod
\"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935468 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935555 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-scripts\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935662 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9mjt\" (UniqueName: \"kubernetes.io/projected/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-kube-api-access-t9mjt\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935701 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-public-tls-certs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935737 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data-custom\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935776 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwn49\" (UniqueName: \"kubernetes.io/projected/6b2a173f-65dd-4b2f-b497-826614a4bc17-kube-api-access-xwn49\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935859 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-logs\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.935931 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-combined-ca-bundle\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.936053 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6v94\" (UniqueName: \"kubernetes.io/projected/9973c84b-640a-44cb-b0e0-e8a2d47ba909-kube-api-access-v6v94\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.936691 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-config-data\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.940437 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"] Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.941789 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-logs\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.942446 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.944667 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-combined-ca-bundle\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.956173 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9mjt\" (UniqueName: \"kubernetes.io/projected/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-kube-api-access-t9mjt\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.957133 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data-custom\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:49 crc kubenswrapper[4768]: I1203 16:41:49.981002 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d8d5741-c6d7-43c3-8f2e-da9817d0992b-config-data\") pod \"barbican-worker-58db46799c-q4fgd\" (UID: \"0d8d5741-c6d7-43c3-8f2e-da9817d0992b\") " pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.023676 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"] Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.039803 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb24j\" (UniqueName: \"kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " 
pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.039850 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.039887 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b2a173f-65dd-4b2f-b497-826614a4bc17-logs\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.039956 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-combined-ca-bundle\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.039990 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9973c84b-640a-44cb-b0e0-e8a2d47ba909-logs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040015 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040047 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-internal-tls-certs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040088 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040132 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-scripts\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040160 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-public-tls-certs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " 
pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040184 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data-custom\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040211 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwn49\" (UniqueName: \"kubernetes.io/projected/6b2a173f-65dd-4b2f-b497-826614a4bc17-kube-api-access-xwn49\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040282 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-combined-ca-bundle\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040311 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040351 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6v94\" (UniqueName: \"kubernetes.io/projected/9973c84b-640a-44cb-b0e0-e8a2d47ba909-kube-api-access-v6v94\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040374 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.040410 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-config-data\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.054410 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b2a173f-65dd-4b2f-b497-826614a4bc17-logs\") pod 
\"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.057100 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-scripts\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.061139 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data-custom\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.061364 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-combined-ca-bundle\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.062889 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9973c84b-640a-44cb-b0e0-e8a2d47ba909-logs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.063534 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-internal-tls-certs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.065870 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-58db46799c-q4fgd" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.085312 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"] Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.085536 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwn49\" (UniqueName: \"kubernetes.io/projected/6b2a173f-65dd-4b2f-b497-826614a4bc17-kube-api-access-xwn49\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.085538 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-config-data\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.086835 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.089181 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-combined-ca-bundle\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.089476 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.091788 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9973c84b-640a-44cb-b0e0-e8a2d47ba909-public-tls-certs\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.093094 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6v94\" (UniqueName: \"kubernetes.io/projected/9973c84b-640a-44cb-b0e0-e8a2d47ba909-kube-api-access-v6v94\") pod \"placement-85896597d4-l886p\" (UID: \"9973c84b-640a-44cb-b0e0-e8a2d47ba909\") " pod="openstack/placement-85896597d4-l886p"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.112757 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"]
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.121917 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b2a173f-65dd-4b2f-b497-826614a4bc17-config-data\") pod \"barbican-keystone-listener-b57955886-dvqrj\" (UID: \"6b2a173f-65dd-4b2f-b497-826614a4bc17\") " pod="openstack/barbican-keystone-listener-b57955886-dvqrj"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.141936 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.146154 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.147402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.147631 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.142987 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.150328 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.152076 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb24j\" (UniqueName: \"kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.152116 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.152535 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.154866 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.156857 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.178837 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb24j\" (UniqueName: \"kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j\") pod \"dnsmasq-dns-85ff748b95-vpdh6\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") " pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.215994 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-b57955886-dvqrj"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.226311 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-85896597d4-l886p"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.260054 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.260250 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.260475 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.260539 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.260635 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msz7p\" (UniqueName: \"kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364137 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364272 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364354 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msz7p\" (UniqueName: \"kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364534 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr"
\"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364836 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.364881 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.372888 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.374636 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.375121 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.389890 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msz7p\" (UniqueName: \"kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p\") pod \"barbican-api-bc888b5d-2h7qr\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.447473 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.455483 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.463553 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7c7c5849fb-krxhd"]
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.514839 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c7c5849fb-krxhd" event={"ID":"5c7cbc9b-e7e7-453c-b045-02d4f0317fff","Type":"ContainerStarted","Data":"1a4065b2ba3efbe371c7c4343479d9830cc6f0453c77265d463886099b473a2f"}
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.682116 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-58db46799c-q4fgd"]
Dec 03 16:41:50 crc kubenswrapper[4768]: I1203 16:41:50.874614 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-b57955886-dvqrj"]
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.072563 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-85896597d4-l886p"]
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.138366 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"]
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.170714 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"]
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.574383 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e84712b7-2b2e-47b2-8be3-024675467757" path="/var/lib/kubelet/pods/e84712b7-2b2e-47b2-8be3-024675467757/volumes"
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.576208 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85896597d4-l886p" event={"ID":"9973c84b-640a-44cb-b0e0-e8a2d47ba909","Type":"ContainerStarted","Data":"9eeeef966d44990a68539073e3a90787d29cb123dfe67679bbf17c0df42ac270"}
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.576299 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85896597d4-l886p" event={"ID":"9973c84b-640a-44cb-b0e0-e8a2d47ba909","Type":"ContainerStarted","Data":"124378d05b1da9a41c8bcdb28914f1c48db82a3264c364136a5ad36fc72aa838"}
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.577565 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-58db46799c-q4fgd" event={"ID":"0d8d5741-c6d7-43c3-8f2e-da9817d0992b","Type":"ContainerStarted","Data":"3a86625c25d8ac606b2d889f9fe16a94161813a6cd987027e411fcf6d5d30ef2"}
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.595297 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" event={"ID":"4dc526fe-7c3a-41f9-94a0-76907850610e","Type":"ContainerStarted","Data":"f3be0ef4ac8cd4cea82c4db8c60c6f4e80983e628975668dde0616109c2ee390"}
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.608248 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7c7c5849fb-krxhd" event={"ID":"5c7cbc9b-e7e7-453c-b045-02d4f0317fff","Type":"ContainerStarted","Data":"fe9937a02d74c72703e48fc29b05df651770b98946d00c088d1483379fcb6d5c"}
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.609858 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7c7c5849fb-krxhd"
Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.616211 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57955886-dvqrj" event={"ID":"6b2a173f-65dd-4b2f-b497-826614a4bc17","Type":"ContainerStarted","Data":"2be13972de6faa4b94c34da5c0a57bd4f0ccff2c22829bac25d6f949d44d409d"}
event={"ID":"6b2a173f-65dd-4b2f-b497-826614a4bc17","Type":"ContainerStarted","Data":"2be13972de6faa4b94c34da5c0a57bd4f0ccff2c22829bac25d6f949d44d409d"} Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.623755 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerStarted","Data":"ef785d54a0a249f9a1f47fa69c260b2f8fadcddc71167f35b1852fd23c897f8b"} Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.623793 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerStarted","Data":"151dee50faec9c2987508e099d2bbfe5bb2691853ad6526d7688291a55d2c9fc"} Dec 03 16:41:51 crc kubenswrapper[4768]: I1203 16:41:51.653155 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7c7c5849fb-krxhd" podStartSLOduration=2.653134262 podStartE2EDuration="2.653134262s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:51.641871207 +0000 UTC m=+1408.561207630" watchObservedRunningTime="2025-12-03 16:41:51.653134262 +0000 UTC m=+1408.572470685" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.649372 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-85896597d4-l886p" event={"ID":"9973c84b-640a-44cb-b0e0-e8a2d47ba909","Type":"ContainerStarted","Data":"2f9a24c8dc523c92e6c6e3f0059c8e19df0d0c214ef8adf990776c070a14c088"} Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.649789 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.650027 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-85896597d4-l886p" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.657857 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-pxm2p" event={"ID":"449f6efb-981d-445a-b10a-a8d76f9d027d","Type":"ContainerStarted","Data":"4af4df7fb7c9fd042e43562b990cf68a90be98ac6394573f270f91f51b285261"} Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.663285 4768 generic.go:334] "Generic (PLEG): container finished" podID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerID="d6022d498736a31b5428705c28b50c236446de9b5e65e78b4995b8871f22a37c" exitCode=0 Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.663971 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" event={"ID":"4dc526fe-7c3a-41f9-94a0-76907850610e","Type":"ContainerDied","Data":"d6022d498736a31b5428705c28b50c236446de9b5e65e78b4995b8871f22a37c"} Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.664000 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.664010 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" event={"ID":"4dc526fe-7c3a-41f9-94a0-76907850610e","Type":"ContainerStarted","Data":"3d46530bb1488ece09d4580c4f327c3a4d0037fd23440855cbf068c1d0ce95ef"} Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.666572 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" 
event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerStarted","Data":"1da409058f9f17fce4c22cb57c1020954041b02a6dc3c622425b81067f82ca75"} Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.666657 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.666854 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.683001 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-85896597d4-l886p" podStartSLOduration=3.682980139 podStartE2EDuration="3.682980139s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:52.669941115 +0000 UTC m=+1409.589277548" watchObservedRunningTime="2025-12-03 16:41:52.682980139 +0000 UTC m=+1409.602316562" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.705257 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" podStartSLOduration=3.705233093 podStartE2EDuration="3.705233093s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:52.696633276 +0000 UTC m=+1409.615969699" watchObservedRunningTime="2025-12-03 16:41:52.705233093 +0000 UTC m=+1409.624569516" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.731393 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-bc888b5d-2h7qr" podStartSLOduration=3.731355581 podStartE2EDuration="3.731355581s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:52.720020525 +0000 UTC m=+1409.639356958" watchObservedRunningTime="2025-12-03 16:41:52.731355581 +0000 UTC m=+1409.650692004" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.751637 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-pxm2p" podStartSLOduration=3.142196214 podStartE2EDuration="1m0.751615872s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="2025-12-03 16:40:54.216298109 +0000 UTC m=+1351.135634522" lastFinishedPulling="2025-12-03 16:41:51.825717757 +0000 UTC m=+1408.745054180" observedRunningTime="2025-12-03 16:41:52.741260947 +0000 UTC m=+1409.660597370" watchObservedRunningTime="2025-12-03 16:41:52.751615872 +0000 UTC m=+1409.670952295" Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.821547 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-b4f5488d-vnlnp"] Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.823784 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.830184 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.830617 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.831661 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-b4f5488d-vnlnp"]
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.943865 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b97311e7-d43d-44d1-b971-c8cb754c1773-logs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.943929 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.943964 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96ps2\" (UniqueName: \"kubernetes.io/projected/b97311e7-d43d-44d1-b971-c8cb754c1773-kube-api-access-96ps2\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.944192 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-public-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.944404 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-internal-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.944440 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-combined-ca-bundle\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:52 crc kubenswrapper[4768]: I1203 16:41:52.944510 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data-custom\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.046824 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp"
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.046869 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96ps2\" (UniqueName: \"kubernetes.io/projected/b97311e7-d43d-44d1-b971-c8cb754c1773-kube-api-access-96ps2\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.046913 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-public-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.046981 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-internal-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.046999 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-combined-ca-bundle\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.047028 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data-custom\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.047092 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b97311e7-d43d-44d1-b971-c8cb754c1773-logs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.047482 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b97311e7-d43d-44d1-b971-c8cb754c1773-logs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.053886 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.054862 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-config-data-custom\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.055131 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-combined-ca-bundle\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.055215 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-internal-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.085685 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96ps2\" (UniqueName: \"kubernetes.io/projected/b97311e7-d43d-44d1-b971-c8cb754c1773-kube-api-access-96ps2\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.098232 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b97311e7-d43d-44d1-b971-c8cb754c1773-public-tls-certs\") pod \"barbican-api-b4f5488d-vnlnp\" (UID: \"b97311e7-d43d-44d1-b971-c8cb754c1773\") " pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:41:53 crc kubenswrapper[4768]: I1203 16:41:53.155109 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.501588 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-b4f5488d-vnlnp"]
Dec 03 16:41:54 crc kubenswrapper[4768]: W1203 16:41:54.509931 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb97311e7_d43d_44d1_b971_c8cb754c1773.slice/crio-cfa061dae960c316e39529fc376f743b104be1737f2085d51cad69ffce612a1c WatchSource:0}: Error finding container cfa061dae960c316e39529fc376f743b104be1737f2085d51cad69ffce612a1c: Status 404 returned error can't find the container with id cfa061dae960c316e39529fc376f743b104be1737f2085d51cad69ffce612a1c
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.692253 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-58db46799c-q4fgd" event={"ID":"0d8d5741-c6d7-43c3-8f2e-da9817d0992b","Type":"ContainerStarted","Data":"578282ef2240ab6068ef22fbbebd6c79d7abbb80ab8324a8ed0ec3d5f41f14ca"}
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.695012 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-b4f5488d-vnlnp" event={"ID":"b97311e7-d43d-44d1-b971-c8cb754c1773","Type":"ContainerStarted","Data":"43a91ca44ccfc04252e54e47ac8a7e5e1e7b56d6a9f7d7c2ab9445ea3fcdfb9a"}
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.695068 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-b4f5488d-vnlnp" event={"ID":"b97311e7-d43d-44d1-b971-c8cb754c1773","Type":"ContainerStarted","Data":"cfa061dae960c316e39529fc376f743b104be1737f2085d51cad69ffce612a1c"}
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.696847 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57955886-dvqrj" event={"ID":"6b2a173f-65dd-4b2f-b497-826614a4bc17","Type":"ContainerStarted","Data":"fc91d3220c06fb74acd72de56c3db4b12c352cd4ba1baea1ab9493977f7d0c62"}
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.696980 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-b57955886-dvqrj" event={"ID":"6b2a173f-65dd-4b2f-b497-826614a4bc17","Type":"ContainerStarted","Data":"47ab283af86525459e5b69d7a16d3c18ff63fb45b6e8a4c67ad3bdbb7a144b1c"}
Dec 03 16:41:54 crc kubenswrapper[4768]: I1203 16:41:54.726185 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-b57955886-dvqrj" podStartSLOduration=2.66445855 podStartE2EDuration="5.726158303s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="2025-12-03 16:41:50.905136119 +0000 UTC m=+1407.824472532" lastFinishedPulling="2025-12-03 16:41:53.966835862 +0000 UTC m=+1410.886172285" observedRunningTime="2025-12-03 16:41:54.71131809 +0000 UTC m=+1411.630654523" watchObservedRunningTime="2025-12-03 16:41:54.726158303 +0000 UTC m=+1411.645494726"
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.709128 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-b4f5488d-vnlnp" event={"ID":"b97311e7-d43d-44d1-b971-c8cb754c1773","Type":"ContainerStarted","Data":"775e354ca1ec4279a51dbc19ad1b31dce936900f90b090d170922e6241e075a1"}
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.709453 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.709479 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.717196 4768 generic.go:334] "Generic (PLEG): container finished" podID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" containerID="cd40e10cd09bf0e0dbdb8b5a036f119bc0d0a1e6f09e9f377800acb538039240" exitCode=0
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.717287 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mftv7" event={"ID":"8f19bd8f-a9f2-41de-b0f3-de08db42cf69","Type":"ContainerDied","Data":"cd40e10cd09bf0e0dbdb8b5a036f119bc0d0a1e6f09e9f377800acb538039240"}
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.726886 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-58db46799c-q4fgd" event={"ID":"0d8d5741-c6d7-43c3-8f2e-da9817d0992b","Type":"ContainerStarted","Data":"0d2a8c45bcd9d5d38b9a9130b27198881f97dc93bbbc65ee72e56b8b8c50bd24"}
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.752082 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-b4f5488d-vnlnp" podStartSLOduration=3.7520500329999997 podStartE2EDuration="3.752050033s" podCreationTimestamp="2025-12-03 16:41:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:41:55.741235147 +0000 UTC m=+1412.660571610" watchObservedRunningTime="2025-12-03 16:41:55.752050033 +0000 UTC m=+1412.671386456"
Dec 03 16:41:55 crc kubenswrapper[4768]: I1203 16:41:55.797618 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-58db46799c-q4fgd" podStartSLOduration=3.555575247 podStartE2EDuration="6.797582743s" podCreationTimestamp="2025-12-03 16:41:49 +0000 UTC" firstStartedPulling="2025-12-03 16:41:50.717567038 +0000 UTC m=+1407.636903461" lastFinishedPulling="2025-12-03 16:41:53.959574534 +0000 UTC m=+1410.878910957" observedRunningTime="2025-12-03 16:41:55.790631072 +0000 UTC m=+1412.709967505" watchObservedRunningTime="2025-12-03 16:41:55.797582743 +0000 UTC m=+1412.716919166"
Dec 03 16:41:58 crc kubenswrapper[4768]: I1203 16:41:58.780357 4768 generic.go:334] "Generic (PLEG): container finished" podID="449f6efb-981d-445a-b10a-a8d76f9d027d" containerID="4af4df7fb7c9fd042e43562b990cf68a90be98ac6394573f270f91f51b285261" exitCode=0
Dec 03 16:41:58 crc kubenswrapper[4768]: I1203 16:41:58.780427 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-pxm2p" event={"ID":"449f6efb-981d-445a-b10a-a8d76f9d027d","Type":"ContainerDied","Data":"4af4df7fb7c9fd042e43562b990cf68a90be98ac6394573f270f91f51b285261"}
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.566298 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mftv7"
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685128 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685183 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685213 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685302 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65sdw\" (UniqueName: \"kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685415 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685442 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle\") pod \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\" (UID: \"8f19bd8f-a9f2-41de-b0f3-de08db42cf69\") "
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.685683 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.687112 4768 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.693317 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw" (OuterVolumeSpecName: "kube-api-access-65sdw") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "kube-api-access-65sdw". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.697807 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts" (OuterVolumeSpecName: "scripts") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.705062 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.735892 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.772692 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data" (OuterVolumeSpecName: "config-data") pod "8f19bd8f-a9f2-41de-b0f3-de08db42cf69" (UID: "8f19bd8f-a9f2-41de-b0f3-de08db42cf69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.788587 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.788628 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.788638 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.788646 4768 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.788656 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65sdw\" (UniqueName: \"kubernetes.io/projected/8f19bd8f-a9f2-41de-b0f3-de08db42cf69-kube-api-access-65sdw\") on node \"crc\" DevicePath \"\"" Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.800512 4768 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.800687 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mftv7" event={"ID":"8f19bd8f-a9f2-41de-b0f3-de08db42cf69","Type":"ContainerDied","Data":"c94c494c1d325ff9d97a3bfd626a8b54487c9feab1ff4840f4008b5096d2513f"}
Dec 03 16:41:59 crc kubenswrapper[4768]: I1203 16:41:59.800738 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c94c494c1d325ff9d97a3bfd626a8b54487c9feab1ff4840f4008b5096d2513f"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.390727 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-859c56f644-z9j2l"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.453210 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.569452 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"]
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.569725 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="dnsmasq-dns" containerID="cri-o://f7526aaa43ecd4503e4453ec90c875bad92c4f2e69a6e6a083cf66f844ef0746" gracePeriod=10
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.833318 4768 generic.go:334] "Generic (PLEG): container finished" podID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerID="f7526aaa43ecd4503e4453ec90c875bad92c4f2e69a6e6a083cf66f844ef0746" exitCode=0
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.833363 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" event={"ID":"3d3dff09-585e-4369-981c-e49cfda14fe5","Type":"ContainerDied","Data":"f7526aaa43ecd4503e4453ec90c875bad92c4f2e69a6e6a083cf66f844ef0746"}
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.907539 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 03 16:42:00 crc kubenswrapper[4768]: E1203 16:42:00.907974 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" containerName="cinder-db-sync"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.907990 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" containerName="cinder-db-sync"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.908193 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" containerName="cinder-db-sync"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.909249 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.917666 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.917764 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.917870 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9hz64"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.920769 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.944657 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.967669 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"]
Dec 03 16:42:00 crc kubenswrapper[4768]: I1203 16:42:00.969430 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.024963 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"]
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026110 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026165 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026312 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnqzh\" (UniqueName: \"kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026365 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026381 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.026411 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0"
\"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.120636 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.122284 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.130920 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131358 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131437 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131465 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131493 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131532 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131713 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnqzh\" (UniqueName: \"kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131766 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-cspqj\" (UniqueName: \"kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131799 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131816 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131864 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.131883 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.132017 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.134939 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.141804 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.145175 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.163925 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.165031 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.166064 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnqzh\" (UniqueName: \"kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh\") pod \"cinder-scheduler-0\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233176 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233223 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233273 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233307 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233345 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233377 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233411 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cspqj\" (UniqueName: \"kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233430 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " 
pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233460 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233482 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233516 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77pq4\" (UniqueName: \"kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233532 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.233553 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.234703 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.235220 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.235997 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.237131 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.237188 
4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.256368 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.257504 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cspqj\" (UniqueName: \"kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj\") pod \"dnsmasq-dns-5c9776ccc5-zmshs\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.315126 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.334996 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335066 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335100 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335152 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335220 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77pq4\" (UniqueName: \"kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335250 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.335312 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc
kubenswrapper[4768]: I1203 16:42:01.335743 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.340428 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.342951 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.347069 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.350149 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.351003 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.365742 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77pq4\" (UniqueName: \"kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4\") pod \"cinder-api-0\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " pod="openstack/cinder-api-0"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.504267 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-pxm2p"
Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.551499 4768 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.644705 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data\") pod \"449f6efb-981d-445a-b10a-a8d76f9d027d\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.644821 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs\") pod \"449f6efb-981d-445a-b10a-a8d76f9d027d\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.644904 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle\") pod \"449f6efb-981d-445a-b10a-a8d76f9d027d\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.644967 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts\") pod \"449f6efb-981d-445a-b10a-a8d76f9d027d\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.645037 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg442\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442\") pod \"449f6efb-981d-445a-b10a-a8d76f9d027d\" (UID: \"449f6efb-981d-445a-b10a-a8d76f9d027d\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.660799 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs" (OuterVolumeSpecName: "certs") pod "449f6efb-981d-445a-b10a-a8d76f9d027d" (UID: "449f6efb-981d-445a-b10a-a8d76f9d027d"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.661818 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts" (OuterVolumeSpecName: "scripts") pod "449f6efb-981d-445a-b10a-a8d76f9d027d" (UID: "449f6efb-981d-445a-b10a-a8d76f9d027d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.663508 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442" (OuterVolumeSpecName: "kube-api-access-xg442") pod "449f6efb-981d-445a-b10a-a8d76f9d027d" (UID: "449f6efb-981d-445a-b10a-a8d76f9d027d"). InnerVolumeSpecName "kube-api-access-xg442". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.711754 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "449f6efb-981d-445a-b10a-a8d76f9d027d" (UID: "449f6efb-981d-445a-b10a-a8d76f9d027d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.740058 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data" (OuterVolumeSpecName: "config-data") pod "449f6efb-981d-445a-b10a-a8d76f9d027d" (UID: "449f6efb-981d-445a-b10a-a8d76f9d027d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.752111 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg442\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-kube-api-access-xg442\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.752155 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.752167 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/449f6efb-981d-445a-b10a-a8d76f9d027d-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.752180 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.752192 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/449f6efb-981d-445a-b10a-a8d76f9d027d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.870348 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-pxm2p" event={"ID":"449f6efb-981d-445a-b10a-a8d76f9d027d","Type":"ContainerDied","Data":"5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb"} Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.870382 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5265677e72672b311220b04d0a4684b52cd505af80517f64fce5e4b7da624cbb" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.870439 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-pxm2p" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.923574 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.964971 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.965085 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.965153 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.965306 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.965440 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9xbg\" (UniqueName: \"kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.965474 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc\") pod \"3d3dff09-585e-4369-981c-e49cfda14fe5\" (UID: \"3d3dff09-585e-4369-981c-e49cfda14fe5\") " Dec 03 16:42:01 crc kubenswrapper[4768]: I1203 16:42:01.972649 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg" (OuterVolumeSpecName: "kube-api-access-w9xbg") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "kube-api-access-w9xbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.053193 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.102786 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9xbg\" (UniqueName: \"kubernetes.io/projected/3d3dff09-585e-4369-981c-e49cfda14fe5-kube-api-access-w9xbg\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.102859 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.132138 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.147322 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.155067 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.171786 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config" (OuterVolumeSpecName: "config") pod "3d3dff09-585e-4369-981c-e49cfda14fe5" (UID: "3d3dff09-585e-4369-981c-e49cfda14fe5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.216631 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.216660 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.216670 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.216684 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3d3dff09-585e-4369-981c-e49cfda14fe5-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.313047 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"] Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.488936 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.660864 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.670514 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-wx7wk"] Dec 03 16:42:02 crc kubenswrapper[4768]: E1203 16:42:02.670887 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="dnsmasq-dns" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.670903 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="dnsmasq-dns" Dec 03 16:42:02 crc kubenswrapper[4768]: E1203 16:42:02.670932 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="init" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.670938 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="init" Dec 03 16:42:02 crc kubenswrapper[4768]: E1203 16:42:02.670963 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d" containerName="cloudkitty-db-sync" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.670969 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d" containerName="cloudkitty-db-sync" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.671137 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" containerName="dnsmasq-dns" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.671153 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d" containerName="cloudkitty-db-sync" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.671840 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.677147 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.677334 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-dr7j4" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.677707 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.679011 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.679508 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.689845 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-wx7wk"] Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.758125 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-644d64cc89-l6cqk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.831521 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.831612 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.831670 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.831708 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xq8ml\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.831728 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.866894 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-859c56f644-z9j2l"] Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.867107 4768 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/neutron-859c56f644-z9j2l" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-api" containerID="cri-o://188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b" gracePeriod=30 Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.867540 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-859c56f644-z9j2l" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-httpd" containerID="cri-o://f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d" gracePeriod=30 Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.924159 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerStarted","Data":"a5b5b7b4f52eb27090b1a6662df12a1f32e7a1123f542075a1b3b3c4c47c984d"} Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.924203 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerStarted","Data":"9b8ba17da8a323f9a588ee4bee0c8b03f6805880a08f9b7a73eb83410fed21b1"} Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.933214 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xq8ml\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.933254 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.933370 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.933428 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.933478 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.938347 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:02 crc 
kubenswrapper[4768]: I1203 16:42:02.938477 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.938919 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.958001 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.975082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xq8ml\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml\") pod \"cloudkitty-storageinit-wx7wk\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " pod="openstack/cloudkitty-storageinit-wx7wk"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.993823 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd" event={"ID":"3d3dff09-585e-4369-981c-e49cfda14fe5","Type":"ContainerDied","Data":"8c5d744c1e1719e7a44001a939ec2025e1078c51255130012a36e2408ad10cd8"}
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.993890 4768 scope.go:117] "RemoveContainer" containerID="f7526aaa43ecd4503e4453ec90c875bad92c4f2e69a6e6a083cf66f844ef0746"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.994135 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-9wmjd"
Dec 03 16:42:02 crc kubenswrapper[4768]: I1203 16:42:02.994766 4768 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.002424 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerStarted","Data":"55509a09486e429670be216e9533a439015df7f31b030792d2a5201c48051a2b"} Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.007442 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerStarted","Data":"86ca132148f610dd674f2ae5c905aa646efd9aba5f29845290dd8ddb54cd133e"} Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.007627 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-central-agent" containerID="cri-o://b52ff473556cf796fedc2d917d1f301a207e69c4a1ceaad27ccae0b1252d53d1" gracePeriod=30 Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.007714 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="proxy-httpd" containerID="cri-o://86ca132148f610dd674f2ae5c905aa646efd9aba5f29845290dd8ddb54cd133e" gracePeriod=30 Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.007752 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="sg-core" containerID="cri-o://0ddfde2ac9d36af44236ad8e6aa81cdaf5acf1c349f9c277243187f790161eb3" gracePeriod=30 Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.007788 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-notification-agent" containerID="cri-o://22013b88b7c24ff003eef4059d7e03c3cb1f824b3664b420d051ff29243df067" gracePeriod=30 Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.008089 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.029856 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerStarted","Data":"e991f6706af833b7c6e7c1f84cfe9059ade45542682cee55ce29a952455cc834"} Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.070478 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.121043633 podStartE2EDuration="1m11.070456679s" podCreationTimestamp="2025-12-03 16:40:52 +0000 UTC" firstStartedPulling="2025-12-03 16:40:53.814127729 +0000 UTC m=+1350.733464142" lastFinishedPulling="2025-12-03 16:42:01.763540765 +0000 UTC m=+1418.682877188" observedRunningTime="2025-12-03 16:42:03.042510131 +0000 UTC m=+1419.961846554" watchObservedRunningTime="2025-12-03 16:42:03.070456679 +0000 UTC m=+1419.989793102" Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.093365 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"] Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.106948 4768 scope.go:117] "RemoveContainer" containerID="7d9e66512bdfc1a6faa86c9291a8193be0957146d146a32af80ff480955b6e41" Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.119639 
4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-9wmjd"]
Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.599955 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d3dff09-585e-4369-981c-e49cfda14fe5" path="/var/lib/kubelet/pods/3d3dff09-585e-4369-981c-e49cfda14fe5/volumes"
Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.604804 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:42:03 crc kubenswrapper[4768]: I1203 16:42:03.724520 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-wx7wk"]
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.126251 4768 generic.go:334] "Generic (PLEG): container finished" podID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerID="f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d" exitCode=0
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.126443 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerDied","Data":"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.151864 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-wx7wk" event={"ID":"45d9dcbe-2050-4bb6-b5f2-8836006e5085","Type":"ContainerStarted","Data":"a11adf6facd38e6a14f14584e42c8c2b1d3fb17e91eeb06ad34278aa868ac6dd"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.154180 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerStarted","Data":"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156394 4768 generic.go:334] "Generic (PLEG): container finished" podID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerID="0ddfde2ac9d36af44236ad8e6aa81cdaf5acf1c349f9c277243187f790161eb3" exitCode=2
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156419 4768 generic.go:334] "Generic (PLEG): container finished" podID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerID="22013b88b7c24ff003eef4059d7e03c3cb1f824b3664b420d051ff29243df067" exitCode=0
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156428 4768 generic.go:334] "Generic (PLEG): container finished" podID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerID="b52ff473556cf796fedc2d917d1f301a207e69c4a1ceaad27ccae0b1252d53d1" exitCode=0
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156463 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerDied","Data":"0ddfde2ac9d36af44236ad8e6aa81cdaf5acf1c349f9c277243187f790161eb3"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156481 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerDied","Data":"22013b88b7c24ff003eef4059d7e03c3cb1f824b3664b420d051ff29243df067"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.156490 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerDied","Data":"b52ff473556cf796fedc2d917d1f301a207e69c4a1ceaad27ccae0b1252d53d1"}
Dec 03 16:42:04 crc
kubenswrapper[4768]: I1203 16:42:04.164237 4768 generic.go:334] "Generic (PLEG): container finished" podID="634039d3-489e-42a0-8910-da39a41b0291" containerID="a5b5b7b4f52eb27090b1a6662df12a1f32e7a1123f542075a1b3b3c4c47c984d" exitCode=0
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.164281 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerDied","Data":"a5b5b7b4f52eb27090b1a6662df12a1f32e7a1123f542075a1b3b3c4c47c984d"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.164307 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerStarted","Data":"e89c9f85aff8d903e75373a5d1244c94ac8faa949d9cf1f61ccd45bebe1d0fa2"}
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.165312 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs"
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.198829 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" podStartSLOduration=4.198805849 podStartE2EDuration="4.198805849s" podCreationTimestamp="2025-12-03 16:42:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:04.182561715 +0000 UTC m=+1421.101898138" watchObservedRunningTime="2025-12-03 16:42:04.198805849 +0000 UTC m=+1421.118142272"
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.600118 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Dec 03 16:42:04 crc kubenswrapper[4768]: I1203 16:42:04.921514 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-bc888b5d-2h7qr"
Dec 03 16:42:05 crc kubenswrapper[4768]: I1203 16:42:05.236905 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerStarted","Data":"61fe7dda4c0ddc2c69dc50f42f2cc41bd1ac45b7a1e5b15727bb00405f5c0b84"}
Dec 03 16:42:05 crc kubenswrapper[4768]: I1203 16:42:05.280423 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-wx7wk" event={"ID":"45d9dcbe-2050-4bb6-b5f2-8836006e5085","Type":"ContainerStarted","Data":"416b952057d30e2fbed49476298cbb7ce11addd9ae2a5190aeffb30029cc5e74"}
Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.133419 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-b4f5488d-vnlnp"
Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.150182 4768 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/neutron-859c56f644-z9j2l" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.153415 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-wx7wk" podStartSLOduration=4.153398194 podStartE2EDuration="4.153398194s" podCreationTimestamp="2025-12-03 16:42:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:05.308431061 +0000 UTC m=+1422.227767504" watchObservedRunningTime="2025-12-03 16:42:06.153398194 +0000 UTC m=+1423.072734617" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.273234 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config\") pod \"126fe5e3-ea62-402d-96db-35bcff0436aa\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.273386 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config\") pod \"126fe5e3-ea62-402d-96db-35bcff0436aa\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.273418 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle\") pod \"126fe5e3-ea62-402d-96db-35bcff0436aa\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.273476 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ztk7\" (UniqueName: \"kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7\") pod \"126fe5e3-ea62-402d-96db-35bcff0436aa\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.273648 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs\") pod \"126fe5e3-ea62-402d-96db-35bcff0436aa\" (UID: \"126fe5e3-ea62-402d-96db-35bcff0436aa\") " Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.278642 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "126fe5e3-ea62-402d-96db-35bcff0436aa" (UID: "126fe5e3-ea62-402d-96db-35bcff0436aa"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.288566 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7" (OuterVolumeSpecName: "kube-api-access-4ztk7") pod "126fe5e3-ea62-402d-96db-35bcff0436aa" (UID: "126fe5e3-ea62-402d-96db-35bcff0436aa"). InnerVolumeSpecName "kube-api-access-4ztk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.321915 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerStarted","Data":"3c784bf25891f77f5b5332ff1e7dedbaf3b15fe22fd4ec53e93f510ac7a76f1c"} Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.339241 4768 generic.go:334] "Generic (PLEG): container finished" podID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerID="188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b" exitCode=0 Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.339418 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-859c56f644-z9j2l" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.340004 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerDied","Data":"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b"} Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.340052 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-859c56f644-z9j2l" event={"ID":"126fe5e3-ea62-402d-96db-35bcff0436aa","Type":"ContainerDied","Data":"68fcd5adc0e9453f1bc99d292b63e34d07d008cc04904986f44fc0d1a7593414"} Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.340072 4768 scope.go:117] "RemoveContainer" containerID="f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.355348 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "126fe5e3-ea62-402d-96db-35bcff0436aa" (UID: "126fe5e3-ea62-402d-96db-35bcff0436aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.358994 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config" (OuterVolumeSpecName: "config") pod "126fe5e3-ea62-402d-96db-35bcff0436aa" (UID: "126fe5e3-ea62-402d-96db-35bcff0436aa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.362587 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.240179625 podStartE2EDuration="6.362572075s" podCreationTimestamp="2025-12-03 16:42:00 +0000 UTC" firstStartedPulling="2025-12-03 16:42:02.501804697 +0000 UTC m=+1419.421141120" lastFinishedPulling="2025-12-03 16:42:03.624197147 +0000 UTC m=+1420.543533570" observedRunningTime="2025-12-03 16:42:06.355641624 +0000 UTC m=+1423.274978047" watchObservedRunningTime="2025-12-03 16:42:06.362572075 +0000 UTC m=+1423.281908488" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.362974 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api-log" containerID="cri-o://86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" gracePeriod=30 Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.363440 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api" containerID="cri-o://21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" gracePeriod=30 Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.363670 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerStarted","Data":"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09"} Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.363718 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.380882 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.383699 4768 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.383728 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.383746 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ztk7\" (UniqueName: \"kubernetes.io/projected/126fe5e3-ea62-402d-96db-35bcff0436aa-kube-api-access-4ztk7\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.403934 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.403912375 podStartE2EDuration="5.403912375s" podCreationTimestamp="2025-12-03 16:42:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:06.391255359 +0000 UTC m=+1423.310591782" watchObservedRunningTime="2025-12-03 16:42:06.403912375 +0000 UTC m=+1423.323248798" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.417202 4768 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "126fe5e3-ea62-402d-96db-35bcff0436aa" (UID: "126fe5e3-ea62-402d-96db-35bcff0436aa"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.464097 4768 scope.go:117] "RemoveContainer" containerID="188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.485627 4768 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/126fe5e3-ea62-402d-96db-35bcff0436aa-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.530524 4768 scope.go:117] "RemoveContainer" containerID="f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d" Dec 03 16:42:06 crc kubenswrapper[4768]: E1203 16:42:06.531149 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d\": container with ID starting with f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d not found: ID does not exist" containerID="f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.531198 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d"} err="failed to get container status \"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d\": rpc error: code = NotFound desc = could not find container \"f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d\": container with ID starting with f34c3bb565d6652cd4cd2599de94831f5d87c651f0bc869a13f28a5bef9fd07d not found: ID does not exist" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.531227 4768 scope.go:117] "RemoveContainer" containerID="188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b" Dec 03 16:42:06 crc kubenswrapper[4768]: E1203 16:42:06.531850 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b\": container with ID starting with 188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b not found: ID does not exist" containerID="188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.531876 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b"} err="failed to get container status \"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b\": rpc error: code = NotFound desc = could not find container \"188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b\": container with ID starting with 188fecfe4d2776f96de7060ed2b4388e25065e4bf5c2e0cfac3eac769c1b993b not found: ID does not exist" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.640253 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-b4f5488d-vnlnp" Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.772161 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/neutron-859c56f644-z9j2l"] Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.781631 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-859c56f644-z9j2l"] Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.790358 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"] Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.790577 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-bc888b5d-2h7qr" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api-log" containerID="cri-o://ef785d54a0a249f9a1f47fa69c260b2f8fadcddc71167f35b1852fd23c897f8b" gracePeriod=30 Dec 03 16:42:06 crc kubenswrapper[4768]: I1203 16:42:06.791048 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-bc888b5d-2h7qr" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api" containerID="cri-o://1da409058f9f17fce4c22cb57c1020954041b02a6dc3c622425b81067f82ca75" gracePeriod=30 Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.254998 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.317149 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.317223 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77pq4\" (UniqueName: \"kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.317243 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.317273 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.317782 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs" (OuterVolumeSpecName: "logs") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.318530 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.318753 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.318784 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data\") pod \"c02e4f2b-3aec-42cf-a642-769eef13561e\" (UID: \"c02e4f2b-3aec-42cf-a642-769eef13561e\") " Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.319240 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c02e4f2b-3aec-42cf-a642-769eef13561e-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.319714 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.324791 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.324829 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts" (OuterVolumeSpecName: "scripts") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.329213 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4" (OuterVolumeSpecName: "kube-api-access-77pq4") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "kube-api-access-77pq4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.359715 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.376277 4768 generic.go:334] "Generic (PLEG): container finished" podID="d0470a66-e9eb-434e-855e-3914ece3246e" containerID="ef785d54a0a249f9a1f47fa69c260b2f8fadcddc71167f35b1852fd23c897f8b" exitCode=143 Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.376345 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerDied","Data":"ef785d54a0a249f9a1f47fa69c260b2f8fadcddc71167f35b1852fd23c897f8b"} Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.384729 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data" (OuterVolumeSpecName: "config-data") pod "c02e4f2b-3aec-42cf-a642-769eef13561e" (UID: "c02e4f2b-3aec-42cf-a642-769eef13561e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.389809 4768 generic.go:334] "Generic (PLEG): container finished" podID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerID="21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" exitCode=0 Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.389839 4768 generic.go:334] "Generic (PLEG): container finished" podID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerID="86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" exitCode=143 Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.390781 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.391674 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerDied","Data":"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09"} Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.391729 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerDied","Data":"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c"} Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.391747 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"c02e4f2b-3aec-42cf-a642-769eef13561e","Type":"ContainerDied","Data":"55509a09486e429670be216e9533a439015df7f31b030792d2a5201c48051a2b"} Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.391768 4768 scope.go:117] "RemoveContainer" containerID="21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422719 4768 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c02e4f2b-3aec-42cf-a642-769eef13561e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422756 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422790 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-scripts\") on 
node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422802 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77pq4\" (UniqueName: \"kubernetes.io/projected/c02e4f2b-3aec-42cf-a642-769eef13561e-kube-api-access-77pq4\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422813 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.422824 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c02e4f2b-3aec-42cf-a642-769eef13561e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.423883 4768 scope.go:117] "RemoveContainer" containerID="86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.458316 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.482029 4768 scope.go:117] "RemoveContainer" containerID="21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.484732 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09\": container with ID starting with 21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09 not found: ID does not exist" containerID="21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.484770 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09"} err="failed to get container status \"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09\": rpc error: code = NotFound desc = could not find container \"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09\": container with ID starting with 21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09 not found: ID does not exist" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.484794 4768 scope.go:117] "RemoveContainer" containerID="86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.486735 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c\": container with ID starting with 86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c not found: ID does not exist" containerID="86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.486759 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c"} err="failed to get container status \"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c\": rpc error: code = NotFound desc = could not find container \"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c\": container with ID 
starting with 86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c not found: ID does not exist" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.486777 4768 scope.go:117] "RemoveContainer" containerID="21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.492773 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09"} err="failed to get container status \"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09\": rpc error: code = NotFound desc = could not find container \"21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09\": container with ID starting with 21ced4547a6b801a8acf69721d9fa0eaa5e41a9b3875a468a9dbc351d27f4a09 not found: ID does not exist" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.492829 4768 scope.go:117] "RemoveContainer" containerID="86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.516747 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c"} err="failed to get container status \"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c\": rpc error: code = NotFound desc = could not find container \"86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c\": container with ID starting with 86c9c7300697c0a0ce0e60be3c17a351131e978320c10c189f2d6f18036aca4c not found: ID does not exist" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.519412 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.545588 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" path="/var/lib/kubelet/pods/126fe5e3-ea62-402d-96db-35bcff0436aa/volumes" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.546240 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" path="/var/lib/kubelet/pods/c02e4f2b-3aec-42cf-a642-769eef13561e/volumes" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.546993 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.547316 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-api" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547333 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-api" Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.547345 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547351 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api" Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.547374 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api-log" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547380 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api-log" Dec 03 16:42:07 crc kubenswrapper[4768]: E1203 16:42:07.547390 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-httpd" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547396 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-httpd" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547612 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-httpd" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547637 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547649 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="c02e4f2b-3aec-42cf-a642-769eef13561e" containerName="cinder-api-log" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.547665 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="126fe5e3-ea62-402d-96db-35bcff0436aa" containerName="neutron-api" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.549118 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.550120 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.553010 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.553052 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.553102 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.627274 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5wml\" (UniqueName: \"kubernetes.io/projected/ed073244-61fa-4ca6-968e-e9cb0a419e4b-kube-api-access-l5wml\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.627641 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.627919 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.627995 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed073244-61fa-4ca6-968e-e9cb0a419e4b-logs\") pod \"cinder-api-0\" (UID: 
\"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.628054 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.628185 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-scripts\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.628343 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.628371 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed073244-61fa-4ca6-968e-e9cb0a419e4b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.628452 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data-custom\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730385 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730423 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed073244-61fa-4ca6-968e-e9cb0a419e4b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730460 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data-custom\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730536 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5wml\" (UniqueName: \"kubernetes.io/projected/ed073244-61fa-4ca6-968e-e9cb0a419e4b-kube-api-access-l5wml\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730570 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730680 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730699 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed073244-61fa-4ca6-968e-e9cb0a419e4b-logs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730725 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730748 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-scripts\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.730971 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ed073244-61fa-4ca6-968e-e9cb0a419e4b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.731623 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed073244-61fa-4ca6-968e-e9cb0a419e4b-logs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.734237 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-scripts\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.734942 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.735275 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.735753 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-config-data-custom\") pod 
\"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.742388 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.743760 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed073244-61fa-4ca6-968e-e9cb0a419e4b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.766551 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5wml\" (UniqueName: \"kubernetes.io/projected/ed073244-61fa-4ca6-968e-e9cb0a419e4b-kube-api-access-l5wml\") pod \"cinder-api-0\" (UID: \"ed073244-61fa-4ca6-968e-e9cb0a419e4b\") " pod="openstack/cinder-api-0" Dec 03 16:42:07 crc kubenswrapper[4768]: I1203 16:42:07.875669 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 03 16:42:08 crc kubenswrapper[4768]: I1203 16:42:08.402639 4768 generic.go:334] "Generic (PLEG): container finished" podID="45d9dcbe-2050-4bb6-b5f2-8836006e5085" containerID="416b952057d30e2fbed49476298cbb7ce11addd9ae2a5190aeffb30029cc5e74" exitCode=0 Dec 03 16:42:08 crc kubenswrapper[4768]: I1203 16:42:08.402913 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-wx7wk" event={"ID":"45d9dcbe-2050-4bb6-b5f2-8836006e5085","Type":"ContainerDied","Data":"416b952057d30e2fbed49476298cbb7ce11addd9ae2a5190aeffb30029cc5e74"} Dec 03 16:42:08 crc kubenswrapper[4768]: I1203 16:42:08.510479 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 03 16:42:08 crc kubenswrapper[4768]: W1203 16:42:08.518394 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded073244_61fa_4ca6_968e_e9cb0a419e4b.slice/crio-eb3fa2eef1e27763f7975361684574fa306fbaf47492e72bcf437960c5696d2e WatchSource:0}: Error finding container eb3fa2eef1e27763f7975361684574fa306fbaf47492e72bcf437960c5696d2e: Status 404 returned error can't find the container with id eb3fa2eef1e27763f7975361684574fa306fbaf47492e72bcf437960c5696d2e Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.421977 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed073244-61fa-4ca6-968e-e9cb0a419e4b","Type":"ContainerStarted","Data":"ab23813daa627b622eccdfe0290b5656badb20042751567356bf4110316366ae"} Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.422554 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed073244-61fa-4ca6-968e-e9cb0a419e4b","Type":"ContainerStarted","Data":"eb3fa2eef1e27763f7975361684574fa306fbaf47492e72bcf437960c5696d2e"} Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.828780 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.983448 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts\") pod \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.983653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle\") pod \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.983757 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data\") pod \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.983865 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xq8ml\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml\") pod \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " Dec 03 16:42:09 crc kubenswrapper[4768]: I1203 16:42:09.984027 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs\") pod \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\" (UID: \"45d9dcbe-2050-4bb6-b5f2-8836006e5085\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.018503 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts" (OuterVolumeSpecName: "scripts") pod "45d9dcbe-2050-4bb6-b5f2-8836006e5085" (UID: "45d9dcbe-2050-4bb6-b5f2-8836006e5085"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.018672 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs" (OuterVolumeSpecName: "certs") pod "45d9dcbe-2050-4bb6-b5f2-8836006e5085" (UID: "45d9dcbe-2050-4bb6-b5f2-8836006e5085"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.018781 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml" (OuterVolumeSpecName: "kube-api-access-xq8ml") pod "45d9dcbe-2050-4bb6-b5f2-8836006e5085" (UID: "45d9dcbe-2050-4bb6-b5f2-8836006e5085"). InnerVolumeSpecName "kube-api-access-xq8ml". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.025492 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data" (OuterVolumeSpecName: "config-data") pod "45d9dcbe-2050-4bb6-b5f2-8836006e5085" (UID: "45d9dcbe-2050-4bb6-b5f2-8836006e5085"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.040316 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45d9dcbe-2050-4bb6-b5f2-8836006e5085" (UID: "45d9dcbe-2050-4bb6-b5f2-8836006e5085"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.086003 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xq8ml\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-kube-api-access-xq8ml\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.086037 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/45d9dcbe-2050-4bb6-b5f2-8836006e5085-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.086050 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.086062 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.086074 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/45d9dcbe-2050-4bb6-b5f2-8836006e5085-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.446895 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"ed073244-61fa-4ca6-968e-e9cb0a419e4b","Type":"ContainerStarted","Data":"e3adc4cff8676a915e4b1cd5bc0b723fb82e7eda5427205ed3473389d6804a29"} Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.448349 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.462822 4768 generic.go:334] "Generic (PLEG): container finished" podID="d0470a66-e9eb-434e-855e-3914ece3246e" containerID="1da409058f9f17fce4c22cb57c1020954041b02a6dc3c622425b81067f82ca75" exitCode=0 Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.462886 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerDied","Data":"1da409058f9f17fce4c22cb57c1020954041b02a6dc3c622425b81067f82ca75"} Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.462934 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-bc888b5d-2h7qr" event={"ID":"d0470a66-e9eb-434e-855e-3914ece3246e","Type":"ContainerDied","Data":"151dee50faec9c2987508e099d2bbfe5bb2691853ad6526d7688291a55d2c9fc"} Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.462945 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="151dee50faec9c2987508e099d2bbfe5bb2691853ad6526d7688291a55d2c9fc" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.465700 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-wx7wk" 
event={"ID":"45d9dcbe-2050-4bb6-b5f2-8836006e5085","Type":"ContainerDied","Data":"a11adf6facd38e6a14f14584e42c8c2b1d3fb17e91eeb06ad34278aa868ac6dd"} Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.465743 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a11adf6facd38e6a14f14584e42c8c2b1d3fb17e91eeb06ad34278aa868ac6dd" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.465765 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-wx7wk" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.483740 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.488232 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.488221177 podStartE2EDuration="3.488221177s" podCreationTimestamp="2025-12-03 16:42:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:10.469005919 +0000 UTC m=+1427.388342362" watchObservedRunningTime="2025-12-03 16:42:10.488221177 +0000 UTC m=+1427.407557600" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.598634 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data\") pod \"d0470a66-e9eb-434e-855e-3914ece3246e\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.599089 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom\") pod \"d0470a66-e9eb-434e-855e-3914ece3246e\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.599198 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msz7p\" (UniqueName: \"kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p\") pod \"d0470a66-e9eb-434e-855e-3914ece3246e\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.599221 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs\") pod \"d0470a66-e9eb-434e-855e-3914ece3246e\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.599314 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle\") pod \"d0470a66-e9eb-434e-855e-3914ece3246e\" (UID: \"d0470a66-e9eb-434e-855e-3914ece3246e\") " Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.605489 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d0470a66-e9eb-434e-855e-3914ece3246e" (UID: "d0470a66-e9eb-434e-855e-3914ece3246e"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.607960 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs" (OuterVolumeSpecName: "logs") pod "d0470a66-e9eb-434e-855e-3914ece3246e" (UID: "d0470a66-e9eb-434e-855e-3914ece3246e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.611375 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p" (OuterVolumeSpecName: "kube-api-access-msz7p") pod "d0470a66-e9eb-434e-855e-3914ece3246e" (UID: "d0470a66-e9eb-434e-855e-3914ece3246e"). InnerVolumeSpecName "kube-api-access-msz7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.663648 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:42:10 crc kubenswrapper[4768]: E1203 16:42:10.664100 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api-log" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664117 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api-log" Dec 03 16:42:10 crc kubenswrapper[4768]: E1203 16:42:10.664134 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664140 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api" Dec 03 16:42:10 crc kubenswrapper[4768]: E1203 16:42:10.664150 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d9dcbe-2050-4bb6-b5f2-8836006e5085" containerName="cloudkitty-storageinit" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664156 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d9dcbe-2050-4bb6-b5f2-8836006e5085" containerName="cloudkitty-storageinit" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664358 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664389 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d9dcbe-2050-4bb6-b5f2-8836006e5085" containerName="cloudkitty-storageinit" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.664405 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api-log" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.665155 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.670350 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0470a66-e9eb-434e-855e-3914ece3246e" (UID: "d0470a66-e9eb-434e-855e-3914ece3246e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.674118 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.674281 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.674689 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.677353 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.677860 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-dr7j4" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.678379 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.707066 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.707096 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msz7p\" (UniqueName: \"kubernetes.io/projected/d0470a66-e9eb-434e-855e-3914ece3246e-kube-api-access-msz7p\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.707105 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0470a66-e9eb-434e-855e-3914ece3246e-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.707114 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.730850 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data" (OuterVolumeSpecName: "config-data") pod "d0470a66-e9eb-434e-855e-3914ece3246e" (UID: "d0470a66-e9eb-434e-855e-3914ece3246e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.773968 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"] Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.774519 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="dnsmasq-dns" containerID="cri-o://e89c9f85aff8d903e75373a5d1244c94ac8faa949d9cf1f61ccd45bebe1d0fa2" gracePeriod=10 Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.779055 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.797679 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.799319 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808514 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808588 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808702 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808767 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808837 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k67dl\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808867 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808902 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.808974 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0470a66-e9eb-434e-855e-3914ece3246e-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911778 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911838 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: 
I1203 16:42:10.911873 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911895 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911915 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k67dl\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911933 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911952 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911970 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kj5p\" (UniqueName: \"kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.911992 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.912011 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.912038 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.912060 4768 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.922103 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.922311 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.923478 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.924414 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.925697 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.942633 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.944382 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.949976 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.978855 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k67dl\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl\") pod \"cloudkitty-proc-0\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:42:10 crc kubenswrapper[4768]: I1203 16:42:10.986093 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.006364 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017267 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017398 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017429 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017471 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017491 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kj5p\" (UniqueName: \"kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017513 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017532 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017574 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017615 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config\") pod 
\"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.017632 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.018496 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.018538 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.019097 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.019499 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.019544 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.019608 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7nzj\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.023464 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.024065 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc 
kubenswrapper[4768]: I1203 16:42:11.052435 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kj5p\" (UniqueName: \"kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p\") pod \"dnsmasq-dns-67bdc55879-4gftf\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.121479 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.121539 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.121571 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7nzj\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.121625 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.123337 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.123397 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.123430 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.129508 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.131361 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " 
pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.134841 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.138004 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.141216 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.145183 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.148588 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7nzj\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj\") pod \"cloudkitty-api-0\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") " pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.171128 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.258003 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.317048 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.180:5353: connect: connection refused" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.382492 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.505818 4768 generic.go:334] "Generic (PLEG): container finished" podID="634039d3-489e-42a0-8910-da39a41b0291" containerID="e89c9f85aff8d903e75373a5d1244c94ac8faa949d9cf1f61ccd45bebe1d0fa2" exitCode=0 Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.505955 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerDied","Data":"e89c9f85aff8d903e75373a5d1244c94ac8faa949d9cf1f61ccd45bebe1d0fa2"} Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.507787 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-bc888b5d-2h7qr" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.578281 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:42:11 crc kubenswrapper[4768]: W1203 16:42:11.594054 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb84f3e07_c6e9_4d59_ab43_7eb487775755.slice/crio-fc698fcccafed455faf208b6f811bb1e18a0f1b36c20815a16739572ff504298 WatchSource:0}: Error finding container fc698fcccafed455faf208b6f811bb1e18a0f1b36c20815a16739572ff504298: Status 404 returned error can't find the container with id fc698fcccafed455faf208b6f811bb1e18a0f1b36c20815a16739572ff504298 Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.603385 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"] Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.605894 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.615440 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-bc888b5d-2h7qr"] Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.663183 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:11 crc kubenswrapper[4768]: I1203 16:42:11.705353 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.011336 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.013235 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145322 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145371 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145562 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145590 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145730 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.145761 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cspqj\" (UniqueName: \"kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj\") pod \"634039d3-489e-42a0-8910-da39a41b0291\" (UID: \"634039d3-489e-42a0-8910-da39a41b0291\") " Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.152078 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj" (OuterVolumeSpecName: "kube-api-access-cspqj") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "kube-api-access-cspqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.216287 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.217379 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.218918 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config" (OuterVolumeSpecName: "config") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.222220 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.249223 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.249565 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.249739 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.249824 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.249915 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cspqj\" (UniqueName: \"kubernetes.io/projected/634039d3-489e-42a0-8910-da39a41b0291-kube-api-access-cspqj\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.257348 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "634039d3-489e-42a0-8910-da39a41b0291" (UID: "634039d3-489e-42a0-8910-da39a41b0291"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.352101 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/634039d3-489e-42a0-8910-da39a41b0291-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.516475 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" event={"ID":"634039d3-489e-42a0-8910-da39a41b0291","Type":"ContainerDied","Data":"9b8ba17da8a323f9a588ee4bee0c8b03f6805880a08f9b7a73eb83410fed21b1"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.516521 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-zmshs" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.516546 4768 scope.go:117] "RemoveContainer" containerID="e89c9f85aff8d903e75373a5d1244c94ac8faa949d9cf1f61ccd45bebe1d0fa2" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.524479 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b84f3e07-c6e9-4d59-ab43-7eb487775755","Type":"ContainerStarted","Data":"fc698fcccafed455faf208b6f811bb1e18a0f1b36c20815a16739572ff504298"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.525992 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerStarted","Data":"469d858234e102f80c854fb85bdb9f5ff9b7c6b632d89fdfafd9fb9d844bc4de"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.526117 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerStarted","Data":"bb0e31f2e197e20033184a58fe98e41d823d22ce73afdba72e4214ffa5ab0a09"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.527424 4768 generic.go:334] "Generic (PLEG): container finished" podID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerID="ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7" exitCode=0 Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.529829 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="cinder-scheduler" containerID="cri-o://61fe7dda4c0ddc2c69dc50f42f2cc41bd1ac45b7a1e5b15727bb00405f5c0b84" gracePeriod=30 Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.530065 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="probe" containerID="cri-o://3c784bf25891f77f5b5332ff1e7dedbaf3b15fe22fd4ec53e93f510ac7a76f1c" gracePeriod=30 Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.527580 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" event={"ID":"4b54d43f-bef8-4ee6-909f-f550a7e70cbd","Type":"ContainerDied","Data":"ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.530316 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" event={"ID":"4b54d43f-bef8-4ee6-909f-f550a7e70cbd","Type":"ContainerStarted","Data":"7c12ac465cbc558a4c993f3d289cc80edd6b83603017b0b68b3e7cd43db46934"} Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.572307 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"] Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.581496 4768 scope.go:117] "RemoveContainer" containerID="a5b5b7b4f52eb27090b1a6662df12a1f32e7a1123f542075a1b3b3c4c47c984d" Dec 03 16:42:12 crc kubenswrapper[4768]: I1203 16:42:12.587503 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-zmshs"] Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.290359 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.582466 4768 generic.go:334] "Generic (PLEG): container finished" 
podID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerID="3c784bf25891f77f5b5332ff1e7dedbaf3b15fe22fd4ec53e93f510ac7a76f1c" exitCode=0 Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.582493 4768 generic.go:334] "Generic (PLEG): container finished" podID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerID="61fe7dda4c0ddc2c69dc50f42f2cc41bd1ac45b7a1e5b15727bb00405f5c0b84" exitCode=0 Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.593006 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="634039d3-489e-42a0-8910-da39a41b0291" path="/var/lib/kubelet/pods/634039d3-489e-42a0-8910-da39a41b0291/volumes" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.593613 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" path="/var/lib/kubelet/pods/d0470a66-e9eb-434e-855e-3914ece3246e/volumes" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.594168 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerDied","Data":"3c784bf25891f77f5b5332ff1e7dedbaf3b15fe22fd4ec53e93f510ac7a76f1c"} Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.594191 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerDied","Data":"61fe7dda4c0ddc2c69dc50f42f2cc41bd1ac45b7a1e5b15727bb00405f5c0b84"} Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.599923 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" event={"ID":"4b54d43f-bef8-4ee6-909f-f550a7e70cbd","Type":"ContainerStarted","Data":"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf"} Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.600713 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.608130 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b84f3e07-c6e9-4d59-ab43-7eb487775755","Type":"ContainerStarted","Data":"dda4024033dcafb51e4167cbe1aef74fbf5ff4098f28098e4199e9660cc12370"} Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.620016 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerStarted","Data":"5bd7e577ab161c7d2b2936170a5137b1d0e0dfa4dcbb3d728ba2acb7e8229528"} Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.620738 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.693358 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.7092164480000003 podStartE2EDuration="3.693340452s" podCreationTimestamp="2025-12-03 16:42:10 +0000 UTC" firstStartedPulling="2025-12-03 16:42:11.597220605 +0000 UTC m=+1428.516557028" lastFinishedPulling="2025-12-03 16:42:12.581344609 +0000 UTC m=+1429.500681032" observedRunningTime="2025-12-03 16:42:13.686894601 +0000 UTC m=+1430.606231024" watchObservedRunningTime="2025-12-03 16:42:13.693340452 +0000 UTC m=+1430.612676875" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.737189 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-67bdc55879-4gftf" podStartSLOduration=3.737165955 podStartE2EDuration="3.737165955s" podCreationTimestamp="2025-12-03 16:42:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:13.66798452 +0000 UTC m=+1430.587320943" watchObservedRunningTime="2025-12-03 16:42:13.737165955 +0000 UTC m=+1430.656502378" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.776137 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.779811 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.779788373 podStartE2EDuration="3.779788373s" podCreationTimestamp="2025-12-03 16:42:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:13.729519069 +0000 UTC m=+1430.648855512" watchObservedRunningTime="2025-12-03 16:42:13.779788373 +0000 UTC m=+1430.699124786" Dec 03 16:42:13 crc kubenswrapper[4768]: I1203 16:42:13.975233 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.090589 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.090709 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.090775 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.090834 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.090940 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.091001 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnqzh\" (UniqueName: \"kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh\") pod \"24d2eb54-d9b1-4202-a313-0a61842a8258\" (UID: \"24d2eb54-d9b1-4202-a313-0a61842a8258\") " Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.093433 4768 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.096982 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.097272 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh" (OuterVolumeSpecName: "kube-api-access-qnqzh") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "kube-api-access-qnqzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.097826 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts" (OuterVolumeSpecName: "scripts") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.161449 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.193529 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnqzh\" (UniqueName: \"kubernetes.io/projected/24d2eb54-d9b1-4202-a313-0a61842a8258-kube-api-access-qnqzh\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.193561 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.193570 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.193579 4768 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/24d2eb54-d9b1-4202-a313-0a61842a8258-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.193588 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.212381 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data" (OuterVolumeSpecName: "config-data") pod "24d2eb54-d9b1-4202-a313-0a61842a8258" (UID: "24d2eb54-d9b1-4202-a313-0a61842a8258"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.294752 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/24d2eb54-d9b1-4202-a313-0a61842a8258-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.633456 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"24d2eb54-d9b1-4202-a313-0a61842a8258","Type":"ContainerDied","Data":"e991f6706af833b7c6e7c1f84cfe9059ade45542682cee55ce29a952455cc834"} Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.633838 4768 scope.go:117] "RemoveContainer" containerID="3c784bf25891f77f5b5332ff1e7dedbaf3b15fe22fd4ec53e93f510ac7a76f1c" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.633971 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api-log" containerID="cri-o://469d858234e102f80c854fb85bdb9f5ff9b7c6b632d89fdfafd9fb9d844bc4de" gracePeriod=30 Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.634039 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.634797 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api" containerID="cri-o://5bd7e577ab161c7d2b2936170a5137b1d0e0dfa4dcbb3d728ba2acb7e8229528" gracePeriod=30 Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.695089 4768 scope.go:117] "RemoveContainer" containerID="61fe7dda4c0ddc2c69dc50f42f2cc41bd1ac45b7a1e5b15727bb00405f5c0b84" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.699224 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.720964 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.743642 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:14 crc kubenswrapper[4768]: E1203 16:42:14.744059 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="cinder-scheduler" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744077 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="cinder-scheduler" Dec 03 16:42:14 crc kubenswrapper[4768]: E1203 16:42:14.744099 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="probe" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744105 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="probe" Dec 03 16:42:14 crc kubenswrapper[4768]: E1203 16:42:14.744119 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="init" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744126 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="init" Dec 03 16:42:14 crc kubenswrapper[4768]: E1203 16:42:14.744144 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="dnsmasq-dns" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744150 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="dnsmasq-dns" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744322 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="634039d3-489e-42a0-8910-da39a41b0291" containerName="dnsmasq-dns" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744349 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="cinder-scheduler" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.744362 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" containerName="probe" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.745406 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.759166 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.767915 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806315 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-scripts\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806351 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806395 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ss2k\" (UniqueName: \"kubernetes.io/projected/0378dd82-69e6-42b8-b5dd-26751ef9a0db-kube-api-access-2ss2k\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806417 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0378dd82-69e6-42b8-b5dd-26751ef9a0db-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806449 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.806524 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.907926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908072 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908178 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-scripts\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908200 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908263 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ss2k\" (UniqueName: \"kubernetes.io/projected/0378dd82-69e6-42b8-b5dd-26751ef9a0db-kube-api-access-2ss2k\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908294 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0378dd82-69e6-42b8-b5dd-26751ef9a0db-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.908393 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0378dd82-69e6-42b8-b5dd-26751ef9a0db-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.916245 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-scripts\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.917056 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.917507 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.925227 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378dd82-69e6-42b8-b5dd-26751ef9a0db-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:14 crc kubenswrapper[4768]: I1203 16:42:14.934007 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ss2k\" (UniqueName: \"kubernetes.io/projected/0378dd82-69e6-42b8-b5dd-26751ef9a0db-kube-api-access-2ss2k\") pod \"cinder-scheduler-0\" (UID: \"0378dd82-69e6-42b8-b5dd-26751ef9a0db\") " pod="openstack/cinder-scheduler-0" Dec 03 16:42:15 crc 
kubenswrapper[4768]: I1203 16:42:15.099906 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.457853 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-bc888b5d-2h7qr" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.457872 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-bc888b5d-2h7qr" podUID="d0470a66-e9eb-434e-855e-3914ece3246e" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.177:9311/healthcheck\": dial tcp 10.217.0.177:9311: i/o timeout (Client.Timeout exceeded while awaiting headers)" Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.553095 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24d2eb54-d9b1-4202-a313-0a61842a8258" path="/var/lib/kubelet/pods/24d2eb54-d9b1-4202-a313-0a61842a8258/volumes" Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.628953 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.643302 4768 generic.go:334] "Generic (PLEG): container finished" podID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerID="5bd7e577ab161c7d2b2936170a5137b1d0e0dfa4dcbb3d728ba2acb7e8229528" exitCode=0 Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.643329 4768 generic.go:334] "Generic (PLEG): container finished" podID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerID="469d858234e102f80c854fb85bdb9f5ff9b7c6b632d89fdfafd9fb9d844bc4de" exitCode=143 Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.643366 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerDied","Data":"5bd7e577ab161c7d2b2936170a5137b1d0e0dfa4dcbb3d728ba2acb7e8229528"} Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.643391 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerDied","Data":"469d858234e102f80c854fb85bdb9f5ff9b7c6b632d89fdfafd9fb9d844bc4de"} Dec 03 16:42:15 crc kubenswrapper[4768]: W1203 16:42:15.645766 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0378dd82_69e6_42b8_b5dd_26751ef9a0db.slice/crio-d1cad66ee8867d3420a0432b38964a4a86aeb17ae7d86feecbc3683b75950e89 WatchSource:0}: Error finding container d1cad66ee8867d3420a0432b38964a4a86aeb17ae7d86feecbc3683b75950e89: Status 404 returned error can't find the container with id d1cad66ee8867d3420a0432b38964a4a86aeb17ae7d86feecbc3683b75950e89 Dec 03 16:42:15 crc kubenswrapper[4768]: I1203 16:42:15.647200 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="b84f3e07-c6e9-4d59-ab43-7eb487775755" containerName="cloudkitty-proc" containerID="cri-o://dda4024033dcafb51e4167cbe1aef74fbf5ff4098f28098e4199e9660cc12370" gracePeriod=30 Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.078733 4768 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147011 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147172 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147520 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs" (OuterVolumeSpecName: "logs") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147707 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147850 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7nzj\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.147955 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.148113 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.148211 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data\") pod \"f862e53e-0dfe-4de5-ad17-17670d09611b\" (UID: \"f862e53e-0dfe-4de5-ad17-17670d09611b\") "
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.152071 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.152703 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.152725 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f862e53e-0dfe-4de5-ad17-17670d09611b-logs\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.155981 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts" (OuterVolumeSpecName: "scripts") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.157059 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj" (OuterVolumeSpecName: "kube-api-access-l7nzj") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "kube-api-access-l7nzj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.168667 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs" (OuterVolumeSpecName: "certs") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.176496 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data" (OuterVolumeSpecName: "config-data") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.188478 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f862e53e-0dfe-4de5-ad17-17670d09611b" (UID: "f862e53e-0dfe-4de5-ad17-17670d09611b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.254147 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7nzj\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-kube-api-access-l7nzj\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.254183 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f862e53e-0dfe-4de5-ad17-17670d09611b-certs\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.254192 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.254202 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.254211 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f862e53e-0dfe-4de5-ad17-17670d09611b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.672362 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"f862e53e-0dfe-4de5-ad17-17670d09611b","Type":"ContainerDied","Data":"bb0e31f2e197e20033184a58fe98e41d823d22ce73afdba72e4214ffa5ab0a09"}
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.672738 4768 scope.go:117] "RemoveContainer" containerID="5bd7e577ab161c7d2b2936170a5137b1d0e0dfa4dcbb3d728ba2acb7e8229528"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.672436 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.675968 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0378dd82-69e6-42b8-b5dd-26751ef9a0db","Type":"ContainerStarted","Data":"9e0d241a7319160f9ccb2306d63ef9202d38977a5c722bd36e933569ac47892c"}
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.676015 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0378dd82-69e6-42b8-b5dd-26751ef9a0db","Type":"ContainerStarted","Data":"d1cad66ee8867d3420a0432b38964a4a86aeb17ae7d86feecbc3683b75950e89"}
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.723843 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.725641 4768 scope.go:117] "RemoveContainer" containerID="469d858234e102f80c854fb85bdb9f5ff9b7c6b632d89fdfafd9fb9d844bc4de"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.735313 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.766453 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 03 16:42:16 crc kubenswrapper[4768]: E1203 16:42:16.766974 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api-log"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.766990 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api-log"
Dec 03 16:42:16 crc kubenswrapper[4768]: E1203 16:42:16.767015 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.767023 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.767278 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.767301 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" containerName="cloudkitty-api-log"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.768666 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.771925 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.772245 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.772408 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.789173 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.876865 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877015 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877155 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877301 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw9nr\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877378 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877410 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877518 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877656 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.877777 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.980654 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981438 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981530 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981673 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw9nr\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981793 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981818 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.981971 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.982030 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.982074 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.982513 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.985608 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.985653 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.987473 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.988065 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.988084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.989426 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:16 crc kubenswrapper[4768]: I1203 16:42:16.990060 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.001368 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw9nr\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr\") pod \"cloudkitty-api-0\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " pod="openstack/cloudkitty-api-0"
Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.091303 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
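[Editorial note] The reflector.go:368 "Caches populated for *v1.Secret" lines above are client-go reflectors warming a local cache of the Secrets that back the volumes being mounted. A sketch of the same informer pattern with client-go, assuming in-cluster credentials and the openstack namespace (illustrative; the kubelet wires this up internally):

package main

import (
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/informers"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	"k8s.io/client-go/tools/cache"
)

func main() {
	cfg, err := rest.InClusterConfig() // assumes this runs inside the cluster
	if err != nil {
		panic(err)
	}
	clientset := kubernetes.NewForConfigOrDie(cfg)

	// Watch Secrets in the "openstack" namespace, the way the kubelet's
	// reflectors populate a local cache before mounting secret volumes.
	factory := informers.NewSharedInformerFactoryWithOptions(
		clientset, 10*time.Minute, informers.WithNamespace("openstack"))
	inf := factory.Core().V1().Secrets().Informer()
	inf.AddEventHandler(cache.ResourceEventHandlerFuncs{
		AddFunc: func(obj interface{}) {
			fmt.Println("cache populated for", obj.(*corev1.Secret).Name)
		},
	})

	stop := make(chan struct{})
	defer close(stop)
	factory.Start(stop)
	cache.WaitForCacheSync(stop, inf.HasSynced) // "Caches populated" point
	select {}
}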
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.542686 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f862e53e-0dfe-4de5-ad17-17670d09611b" path="/var/lib/kubelet/pods/f862e53e-0dfe-4de5-ad17-17670d09611b/volumes" Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.685633 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.687399 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0378dd82-69e6-42b8-b5dd-26751ef9a0db","Type":"ContainerStarted","Data":"c6f5a1bd86454a5cbd69e42ef47d0a07d95b014b9b0df3e5c6b458ea77a873cc"} Dec 03 16:42:17 crc kubenswrapper[4768]: W1203 16:42:17.692032 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d103c3d_bc24_441d_a619_c02dd3be204f.slice/crio-ae0552e041d8c5186c19698b3ac04b09e442c012fc3bba3bba74885ffbb1b2a0 WatchSource:0}: Error finding container ae0552e041d8c5186c19698b3ac04b09e442c012fc3bba3bba74885ffbb1b2a0: Status 404 returned error can't find the container with id ae0552e041d8c5186c19698b3ac04b09e442c012fc3bba3bba74885ffbb1b2a0 Dec 03 16:42:17 crc kubenswrapper[4768]: I1203 16:42:17.713352 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.713335545 podStartE2EDuration="3.713335545s" podCreationTimestamp="2025-12-03 16:42:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:17.710278768 +0000 UTC m=+1434.629615191" watchObservedRunningTime="2025-12-03 16:42:17.713335545 +0000 UTC m=+1434.632671968" Dec 03 16:42:18 crc kubenswrapper[4768]: I1203 16:42:18.702766 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerStarted","Data":"623e299e4155734d13d177d61ad0fdfd877de0186ce3f807b118ce3db9b38879"} Dec 03 16:42:18 crc kubenswrapper[4768]: I1203 16:42:18.703238 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerStarted","Data":"17f8b4c0fe15656d345f5167fba5978f47abd64cb728bae5df7a3d6fb0064fd0"} Dec 03 16:42:18 crc kubenswrapper[4768]: I1203 16:42:18.703252 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerStarted","Data":"ae0552e041d8c5186c19698b3ac04b09e442c012fc3bba3bba74885ffbb1b2a0"} Dec 03 16:42:19 crc kubenswrapper[4768]: I1203 16:42:19.702229 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 03 16:42:19 crc kubenswrapper[4768]: I1203 16:42:19.711587 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 03 16:42:19 crc kubenswrapper[4768]: I1203 16:42:19.749762 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.749732781 podStartE2EDuration="3.749732781s" podCreationTimestamp="2025-12-03 16:42:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-03 16:42:18.729857691 +0000 UTC m=+1435.649194124" watchObservedRunningTime="2025-12-03 16:42:19.749732781 +0000 UTC m=+1436.669069244" Dec 03 16:42:20 crc kubenswrapper[4768]: I1203 16:42:20.100430 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.172732 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.252542 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"] Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.253031 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="dnsmasq-dns" containerID="cri-o://3d46530bb1488ece09d4580c4f327c3a4d0037fd23440855cbf068c1d0ce95ef" gracePeriod=10 Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.255711 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7c7c5849fb-krxhd" Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.734108 4768 generic.go:334] "Generic (PLEG): container finished" podID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerID="3d46530bb1488ece09d4580c4f327c3a4d0037fd23440855cbf068c1d0ce95ef" exitCode=0 Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.734200 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" event={"ID":"4dc526fe-7c3a-41f9-94a0-76907850610e","Type":"ContainerDied","Data":"3d46530bb1488ece09d4580c4f327c3a4d0037fd23440855cbf068c1d0ce95ef"} Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.735955 4768 generic.go:334] "Generic (PLEG): container finished" podID="b84f3e07-c6e9-4d59-ab43-7eb487775755" containerID="dda4024033dcafb51e4167cbe1aef74fbf5ff4098f28098e4199e9660cc12370" exitCode=0 Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.736016 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b84f3e07-c6e9-4d59-ab43-7eb487775755","Type":"ContainerDied","Data":"dda4024033dcafb51e4167cbe1aef74fbf5ff4098f28098e4199e9660cc12370"} Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.781903 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-85896597d4-l886p" Dec 03 16:42:21 crc kubenswrapper[4768]: I1203 16:42:21.810424 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-85896597d4-l886p" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.365515 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.370564 4768 util.go:48] "No ready sandbox for pod can be found. 
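[Editorial note] Each "Observed pod startup duration" entry above pairs wall-clock timestamps with monotonic offsets (the m=+... suffix). The reported SLO duration is just watchObservedRunningTime minus podCreationTimestamp. A small Go check using the values from the cinder-scheduler-0 entry; the layout string is Go's reference time, and the timestamps are treated as plain wall-clock values:

package main

import (
	"fmt"
	"time"
)

// Matches the "2025-12-03 16:42:17.713335545 +0000 UTC" form in the log.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func main() {
	created, err := time.Parse(layout, "2025-12-03 16:42:14 +0000 UTC")
	if err != nil {
		panic(err)
	}
	watched, err := time.Parse(layout, "2025-12-03 16:42:17.713335545 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 3.713335545s, matching podStartSLOduration/podStartE2EDuration.
	fmt.Println("startup duration:", watched.Sub(created))
}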
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.428689 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k67dl\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.428957 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429032 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429105 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429307 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429401 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429484 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb24j\" (UniqueName: \"kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429567 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429694 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429785 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs\") pod \"b84f3e07-c6e9-4d59-ab43-7eb487775755\" (UID: \"b84f3e07-c6e9-4d59-ab43-7eb487775755\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429865 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.429934 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb\") pod \"4dc526fe-7c3a-41f9-94a0-76907850610e\" (UID: \"4dc526fe-7c3a-41f9-94a0-76907850610e\") "
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.442330 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts" (OuterVolumeSpecName: "scripts") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.453932 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.465574 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 03 16:42:22 crc kubenswrapper[4768]: E1203 16:42:22.465955 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="dnsmasq-dns"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.465973 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="dnsmasq-dns"
Dec 03 16:42:22 crc kubenswrapper[4768]: E1203 16:42:22.465997 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b84f3e07-c6e9-4d59-ab43-7eb487775755" containerName="cloudkitty-proc"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.466003 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b84f3e07-c6e9-4d59-ab43-7eb487775755" containerName="cloudkitty-proc"
Dec 03 16:42:22 crc kubenswrapper[4768]: E1203 16:42:22.466015 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="init"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.466021 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="init"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.466206 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b84f3e07-c6e9-4d59-ab43-7eb487775755" containerName="cloudkitty-proc"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.466231 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" containerName="dnsmasq-dns"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.466939 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.470619 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.470767 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.470852 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-wkm4l"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.479449 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs" (OuterVolumeSpecName: "certs") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.484617 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.494120 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j" (OuterVolumeSpecName: "kube-api-access-tb24j") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "kube-api-access-tb24j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.495432 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl" (OuterVolumeSpecName: "kube-api-access-k67dl") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "kube-api-access-k67dl". PluginName "kubernetes.io/projected", VolumeGidValue ""
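[Editorial note] The "Killing container with a grace period" entries in this section (gracePeriod=30 for cloudkitty-proc, 10 for dnsmasq-dns) follow the standard SIGTERM-then-SIGKILL shape: signal the process, wait out the grace period, and force-kill only if it is still alive. A minimal local sketch of that pattern in Go, using sleep as a stand-in workload (POSIX-only; not the CRI path the kubelet actually takes):

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, then SIGKILL if the process outlives the
// grace period -- the same shape as the kubelet's graceful kill.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()

	cmd.Process.Signal(syscall.SIGTERM)
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		cmd.Process.Kill() // SIGKILL once the grace period expires
		<-done
		fmt.Println("force-killed after grace period")
	}
}

func main() {
	cmd := exec.Command("sleep", "60") // stand-in workload
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	stopWithGrace(cmd, 10*time.Second) // gracePeriod=10, as for dnsmasq-dns
}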
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.533337 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config-secret\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.533374 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.533572 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534066 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk2vb\" (UniqueName: \"kubernetes.io/projected/8bddf09b-660e-4615-a1c6-72d46c7c2216-kube-api-access-hk2vb\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534294 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534314 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k67dl\" (UniqueName: \"kubernetes.io/projected/b84f3e07-c6e9-4d59-ab43-7eb487775755-kube-api-access-k67dl\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534324 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534333 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.534361 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb24j\" (UniqueName: \"kubernetes.io/projected/4dc526fe-7c3a-41f9-94a0-76907850610e-kube-api-access-tb24j\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.547386 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data" (OuterVolumeSpecName: "config-data") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.576307 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.577261 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.581735 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.591020 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b84f3e07-c6e9-4d59-ab43-7eb487775755" (UID: "b84f3e07-c6e9-4d59-ab43-7eb487775755"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.602079 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config" (OuterVolumeSpecName: "config") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.628569 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4dc526fe-7c3a-41f9-94a0-76907850610e" (UID: "4dc526fe-7c3a-41f9-94a0-76907850610e"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636292 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config-secret\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636408 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636539 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636707 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk2vb\" (UniqueName: \"kubernetes.io/projected/8bddf09b-660e-4615-a1c6-72d46c7c2216-kube-api-access-hk2vb\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636905 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.636966 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637030 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637096 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637168 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637245 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b84f3e07-c6e9-4d59-ab43-7eb487775755-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637321 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4dc526fe-7c3a-41f9-94a0-76907850610e-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.637555 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" 
(UniqueName: \"kubernetes.io/configmap/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.641580 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-combined-ca-bundle\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.644662 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/8bddf09b-660e-4615-a1c6-72d46c7c2216-openstack-config-secret\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.655405 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk2vb\" (UniqueName: \"kubernetes.io/projected/8bddf09b-660e-4615-a1c6-72d46c7c2216-kube-api-access-hk2vb\") pod \"openstackclient\" (UID: \"8bddf09b-660e-4615-a1c6-72d46c7c2216\") " pod="openstack/openstackclient" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.747222 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" event={"ID":"4dc526fe-7c3a-41f9-94a0-76907850610e","Type":"ContainerDied","Data":"f3be0ef4ac8cd4cea82c4db8c60c6f4e80983e628975668dde0616109c2ee390"} Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.747271 4768 scope.go:117] "RemoveContainer" containerID="3d46530bb1488ece09d4580c4f327c3a4d0037fd23440855cbf068c1d0ce95ef" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.747392 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vpdh6" Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.754945 4768 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.754941 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"b84f3e07-c6e9-4d59-ab43-7eb487775755","Type":"ContainerDied","Data":"fc698fcccafed455faf208b6f811bb1e18a0f1b36c20815a16739572ff504298"}
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.786638 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.792058 4768 scope.go:117] "RemoveContainer" containerID="d6022d498736a31b5428705c28b50c236446de9b5e65e78b4995b8871f22a37c"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.800551 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vpdh6"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.811845 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.820969 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.823840 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.828746 4768 scope.go:117] "RemoveContainer" containerID="dda4024033dcafb51e4167cbe1aef74fbf5ff4098f28098e4199e9660cc12370"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.829052 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.831146 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.843656 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.847479 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.900773 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942410 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942477 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f885p\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942534 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942563 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942585 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:22 crc kubenswrapper[4768]: I1203 16:42:22.942680 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045047 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045364 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f885p\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045404 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045422 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045438 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.045489 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.049444 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.049748 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.052683 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.056607 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.057299 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.074180 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f885p\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p\") pod \"cloudkitty-proc-0\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.156015 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.366650 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.544609 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dc526fe-7c3a-41f9-94a0-76907850610e" path="/var/lib/kubelet/pods/4dc526fe-7c3a-41f9-94a0-76907850610e/volumes"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.545537 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b84f3e07-c6e9-4d59-ab43-7eb487775755" path="/var/lib/kubelet/pods/b84f3e07-c6e9-4d59-ab43-7eb487775755/volumes"
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.611195 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.770817 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8bddf09b-660e-4615-a1c6-72d46c7c2216","Type":"ContainerStarted","Data":"8da794fc0ffa6a58424f846dc48f5cd8fdd8772f20e0395102dcb77e928eb370"}
Dec 03 16:42:23 crc kubenswrapper[4768]: I1203 16:42:23.792802 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4f24e895-8599-4764-9cbd-30a74bad2423","Type":"ContainerStarted","Data":"d25433323d48ece6473952cd4ab3735dbcd715b78df375ef24bba71c83d5c952"}
Dec 03 16:42:24 crc kubenswrapper[4768]: I1203 16:42:24.802432 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4f24e895-8599-4764-9cbd-30a74bad2423","Type":"ContainerStarted","Data":"9df5d26753c5b7d836b831a6b72289787b31904d8f479af31de63060e92c50c3"}
Dec 03 16:42:24 crc kubenswrapper[4768]: I1203 16:42:24.835848 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.835822068 podStartE2EDuration="2.835822068s" podCreationTimestamp="2025-12-03 16:42:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:24.819430712 +0000 UTC m=+1441.738767145" watchObservedRunningTime="2025-12-03 16:42:24.835822068 +0000 UTC m=+1441.755158501"
Dec 03 16:42:25 crc kubenswrapper[4768]: I1203 16:42:25.338961 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.573043 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-58c4c66bd9-w8lwh"]
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.575083 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.582700 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-58c4c66bd9-w8lwh"]
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.584816 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.584926 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.585012 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659586 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-run-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659648 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-etc-swift\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659671 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-internal-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659692 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gz6z\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-kube-api-access-6gz6z\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659709 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-log-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.659870 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-config-data\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.660548 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-public-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.660661 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-combined-ca-bundle\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762670 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-run-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762721 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-etc-swift\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762781 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-internal-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762798 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gz6z\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-kube-api-access-6gz6z\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762843 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-log-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762936 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-config-data\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.762954 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-public-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh"
Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.763037 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-combined-ca-bundle\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") "
pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.765197 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-run-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.765989 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53c419ad-7c96-450d-be91-ae1598cfd390-log-httpd\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.769830 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-public-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.771176 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-internal-tls-certs\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.772532 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-etc-swift\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.785097 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gz6z\" (UniqueName: \"kubernetes.io/projected/53c419ad-7c96-450d-be91-ae1598cfd390-kube-api-access-6gz6z\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.785404 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-config-data\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.787513 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53c419ad-7c96-450d-be91-ae1598cfd390-combined-ca-bundle\") pod \"swift-proxy-58c4c66bd9-w8lwh\" (UID: \"53c419ad-7c96-450d-be91-ae1598cfd390\") " pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:28 crc kubenswrapper[4768]: I1203 16:42:28.893857 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:29 crc kubenswrapper[4768]: I1203 16:42:29.544806 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-58c4c66bd9-w8lwh"] Dec 03 16:42:33 crc kubenswrapper[4768]: I1203 16:42:33.904445 4768 generic.go:334] "Generic (PLEG): container finished" podID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerID="86ca132148f610dd674f2ae5c905aa646efd9aba5f29845290dd8ddb54cd133e" exitCode=137 Dec 03 16:42:33 crc kubenswrapper[4768]: I1203 16:42:33.904593 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerDied","Data":"86ca132148f610dd674f2ae5c905aa646efd9aba5f29845290dd8ddb54cd133e"} Dec 03 16:42:34 crc kubenswrapper[4768]: I1203 16:42:34.767318 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:34 crc kubenswrapper[4768]: I1203 16:42:34.767702 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-httpd" containerID="cri-o://6897e6021f9255f3f9879dacaa346be1f440258d4c1c009e12601d680bb41605" gracePeriod=30 Dec 03 16:42:34 crc kubenswrapper[4768]: I1203 16:42:34.767601 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-log" containerID="cri-o://56fe90609af053750cebb497d51d46888c5dc58d2760b4ac9cc2d1e0156f85e3" gracePeriod=30 Dec 03 16:42:34 crc kubenswrapper[4768]: I1203 16:42:34.934031 4768 generic.go:334] "Generic (PLEG): container finished" podID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerID="56fe90609af053750cebb497d51d46888c5dc58d2760b4ac9cc2d1e0156f85e3" exitCode=143 Dec 03 16:42:34 crc kubenswrapper[4768]: I1203 16:42:34.934122 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerDied","Data":"56fe90609af053750cebb497d51d46888c5dc58d2760b4ac9cc2d1e0156f85e3"} Dec 03 16:42:35 crc kubenswrapper[4768]: W1203 16:42:35.695360 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53c419ad_7c96_450d_be91_ae1598cfd390.slice/crio-ac6cc8d441df62e932795e579e7f660a55f1d8782f70675faeb9e3b690b63cc7 WatchSource:0}: Error finding container ac6cc8d441df62e932795e579e7f660a55f1d8782f70675faeb9e3b690b63cc7: Status 404 returned error can't find the container with id ac6cc8d441df62e932795e579e7f660a55f1d8782f70675faeb9e3b690b63cc7 Dec 03 16:42:35 crc kubenswrapper[4768]: I1203 16:42:35.968651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" event={"ID":"53c419ad-7c96-450d-be91-ae1598cfd390","Type":"ContainerStarted","Data":"ac6cc8d441df62e932795e579e7f660a55f1d8782f70675faeb9e3b690b63cc7"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.074874 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124461 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124552 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124638 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124658 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpcnh\" (UniqueName: \"kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124750 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124789 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.124859 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml\") pod \"821ed115-2471-4f5e-8cca-a6a6f35f165f\" (UID: \"821ed115-2471-4f5e-8cca-a6a6f35f165f\") " Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.125278 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.125472 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.125527 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.128336 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh" (OuterVolumeSpecName: "kube-api-access-vpcnh") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). InnerVolumeSpecName "kube-api-access-vpcnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.128852 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts" (OuterVolumeSpecName: "scripts") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.161604 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.215124 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.228951 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.228981 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.228994 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.229005 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpcnh\" (UniqueName: \"kubernetes.io/projected/821ed115-2471-4f5e-8cca-a6a6f35f165f-kube-api-access-vpcnh\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.229014 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/821ed115-2471-4f5e-8cca-a6a6f35f165f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.273947 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data" (OuterVolumeSpecName: "config-data") pod "821ed115-2471-4f5e-8cca-a6a6f35f165f" (UID: "821ed115-2471-4f5e-8cca-a6a6f35f165f"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.330541 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/821ed115-2471-4f5e-8cca-a6a6f35f165f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.657553 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.657813 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-log" containerID="cri-o://0a91d13771fdc789efbacc4260fe138fd9fa7056af604a8a93b8afceaf9549b7" gracePeriod=30 Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.657983 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-httpd" containerID="cri-o://da654315bf738832b22d3d42e152e61599c50831fe17e9585f977103544abf52" gracePeriod=30 Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.980000 4768 generic.go:334] "Generic (PLEG): container finished" podID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerID="0a91d13771fdc789efbacc4260fe138fd9fa7056af604a8a93b8afceaf9549b7" exitCode=143 Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.980097 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerDied","Data":"0a91d13771fdc789efbacc4260fe138fd9fa7056af604a8a93b8afceaf9549b7"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.982161 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"8bddf09b-660e-4615-a1c6-72d46c7c2216","Type":"ContainerStarted","Data":"fa634d77a0f48ef8fc61c86fb88d038b85ad67085ebfb75e6a97e63f81b3163d"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.984900 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"821ed115-2471-4f5e-8cca-a6a6f35f165f","Type":"ContainerDied","Data":"2f3b4375edf721d2f3f14eebe76b94b6832c1153a67741f0f4dfe83b1c3b41c6"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.984925 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.985367 4768 scope.go:117] "RemoveContainer" containerID="86ca132148f610dd674f2ae5c905aa646efd9aba5f29845290dd8ddb54cd133e" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.986909 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" event={"ID":"53c419ad-7c96-450d-be91-ae1598cfd390","Type":"ContainerStarted","Data":"d9826b135b20f8fe800561647cc954c09b0218394af438eee7ae82f9a903e954"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.986947 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" event={"ID":"53c419ad-7c96-450d-be91-ae1598cfd390","Type":"ContainerStarted","Data":"88c521be3ad5533796c92efc2fea5767de788f6411295c05525c84caab901376"} Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.987287 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:36 crc kubenswrapper[4768]: I1203 16:42:36.987959 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.010034 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.529028494 podStartE2EDuration="15.010017252s" podCreationTimestamp="2025-12-03 16:42:22 +0000 UTC" firstStartedPulling="2025-12-03 16:42:23.378142794 +0000 UTC m=+1440.297479217" lastFinishedPulling="2025-12-03 16:42:35.859131552 +0000 UTC m=+1452.778467975" observedRunningTime="2025-12-03 16:42:37.009320027 +0000 UTC m=+1453.928656450" watchObservedRunningTime="2025-12-03 16:42:37.010017252 +0000 UTC m=+1453.929353675" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.021897 4768 scope.go:117] "RemoveContainer" containerID="0ddfde2ac9d36af44236ad8e6aa81cdaf5acf1c349f9c277243187f790161eb3" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.044795 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.052352 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.068127 4768 scope.go:117] "RemoveContainer" containerID="22013b88b7c24ff003eef4059d7e03c3cb1f824b3664b420d051ff29243df067" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.070054 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" podStartSLOduration=9.070034528 podStartE2EDuration="9.070034528s" podCreationTimestamp="2025-12-03 16:42:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:37.05403893 +0000 UTC m=+1453.973375353" watchObservedRunningTime="2025-12-03 16:42:37.070034528 +0000 UTC m=+1453.989370951" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.083722 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:37 crc kubenswrapper[4768]: E1203 16:42:37.084221 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-notification-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.084305 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-notification-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: E1203 16:42:37.084363 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-central-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.084419 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-central-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: E1203 16:42:37.084473 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="proxy-httpd" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.084533 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="proxy-httpd" Dec 03 16:42:37 crc kubenswrapper[4768]: E1203 16:42:37.084620 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="sg-core" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.084694 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="sg-core" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.084927 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="proxy-httpd" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.085002 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="sg-core" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.085063 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-central-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.085120 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" containerName="ceilometer-notification-agent" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.086890 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.090495 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.090695 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.094611 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.099126 4768 scope.go:117] "RemoveContainer" containerID="b52ff473556cf796fedc2d917d1f301a207e69c4a1ceaad27ccae0b1252d53d1" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145831 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145887 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145907 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145925 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145954 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145979 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.145996 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99gsf\" (UniqueName: \"kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248231 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248293 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248337 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248362 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99gsf\" (UniqueName: \"kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248551 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248614 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.248638 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.249164 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.249233 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.254596 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.255941 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.257767 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.271519 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99gsf\" (UniqueName: \"kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.275188 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.413787 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.566224 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821ed115-2471-4f5e-8cca-a6a6f35f165f" path="/var/lib/kubelet/pods/821ed115-2471-4f5e-8cca-a6a6f35f165f/volumes" Dec 03 16:42:37 crc kubenswrapper[4768]: W1203 16:42:37.943089 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb72382d_b40a_49c4_93b1_4667366812bd.slice/crio-8eee50067f6b6156523027886ece7636ca2a4e5b52620f5ad2ef53d646013246 WatchSource:0}: Error finding container 8eee50067f6b6156523027886ece7636ca2a4e5b52620f5ad2ef53d646013246: Status 404 returned error can't find the container with id 8eee50067f6b6156523027886ece7636ca2a4e5b52620f5ad2ef53d646013246 Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.948855 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:37 crc kubenswrapper[4768]: I1203 16:42:37.996815 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerStarted","Data":"8eee50067f6b6156523027886ece7636ca2a4e5b52620f5ad2ef53d646013246"} Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.100789 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-vhh2l"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.109196 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.179632 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vhh2l"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.181195 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct4wb\" (UniqueName: \"kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.181373 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.232156 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-b2p9v"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.233858 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.268061 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-b2p9v"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.283798 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7dvt\" (UniqueName: \"kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.283857 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.283950 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct4wb\" (UniqueName: \"kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.283985 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.284993 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc 
kubenswrapper[4768]: I1203 16:42:38.292669 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-fff5-account-create-update-vdjxl"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.294028 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.296895 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.306459 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct4wb\" (UniqueName: \"kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb\") pod \"nova-api-db-create-vhh2l\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.316325 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-fff5-account-create-update-vdjxl"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.384683 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-b1ad-account-create-update-64z2j"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.385520 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.385567 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.385596 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js2wc\" (UniqueName: \"kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.385665 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7dvt\" (UniqueName: \"kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.386153 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.386502 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.389891 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.394475 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b1ad-account-create-update-64z2j"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.406794 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7dvt\" (UniqueName: \"kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt\") pod \"nova-cell0-db-create-b2p9v\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.474507 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-xqmkm"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.475983 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.481544 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.485097 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xqmkm"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.490319 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.490369 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js2wc\" (UniqueName: \"kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.490459 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h94l5\" (UniqueName: \"kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.490509 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: 
\"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.491214 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.511239 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js2wc\" (UniqueName: \"kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc\") pod \"nova-api-fff5-account-create-update-vdjxl\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.558902 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.584747 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-6ef0-account-create-update-hxsjh"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.586687 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.588701 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.592721 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h94l5\" (UniqueName: \"kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.592782 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.592808 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.592932 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p67zc\" (UniqueName: \"kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.594373 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.600578 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6ef0-account-create-update-hxsjh"] Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.609944 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h94l5\" (UniqueName: \"kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5\") pod \"nova-cell0-b1ad-account-create-update-64z2j\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.695756 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.695819 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.695852 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cj2t\" (UniqueName: \"kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.695977 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p67zc\" (UniqueName: \"kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.697106 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.711224 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.742808 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p67zc\" (UniqueName: \"kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc\") pod \"nova-cell1-db-create-xqmkm\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.774023 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.795424 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.797903 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.797960 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cj2t\" (UniqueName: \"kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.798839 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.821280 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cj2t\" (UniqueName: \"kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t\") pod \"nova-cell1-6ef0-account-create-update-hxsjh\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:38 crc kubenswrapper[4768]: I1203 16:42:38.906329 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.030677 4768 generic.go:334] "Generic (PLEG): container finished" podID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerID="6897e6021f9255f3f9879dacaa346be1f440258d4c1c009e12601d680bb41605" exitCode=0 Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.030748 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerDied","Data":"6897e6021f9255f3f9879dacaa346be1f440258d4c1c009e12601d680bb41605"} Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.030815 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7126bbaa-d9d7-41db-aafa-8b783cd992fb","Type":"ContainerDied","Data":"e73804da5c87bd1f56f1bb7dd3b95c32d0a872a437599231f02ea19e19c73ff4"} Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.030826 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e73804da5c87bd1f56f1bb7dd3b95c32d0a872a437599231f02ea19e19c73ff4" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.174858 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.282559 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-vhh2l"] Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.300634 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-b2p9v"] Dec 03 16:42:39 crc kubenswrapper[4768]: W1203 16:42:39.307719 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d848bbe_f5d1_4661_b0fe_77acbc5de436.slice/crio-d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c WatchSource:0}: Error finding container d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c: Status 404 returned error can't find the container with id d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308375 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308463 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308678 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs\") pod 
\"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308703 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308813 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308842 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.308944 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9kfd\" (UniqueName: \"kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd\") pod \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\" (UID: \"7126bbaa-d9d7-41db-aafa-8b783cd992fb\") " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.310967 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.311251 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs" (OuterVolumeSpecName: "logs") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.315187 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd" (OuterVolumeSpecName: "kube-api-access-s9kfd") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "kube-api-access-s9kfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.316832 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts" (OuterVolumeSpecName: "scripts") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.352104 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099" (OuterVolumeSpecName: "glance") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.366755 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.381527 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.406776 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data" (OuterVolumeSpecName: "config-data") pod "7126bbaa-d9d7-41db-aafa-8b783cd992fb" (UID: "7126bbaa-d9d7-41db-aafa-8b783cd992fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.409308 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422802 4768 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422868 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422884 4768 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7126bbaa-d9d7-41db-aafa-8b783cd992fb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422927 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") on node \"crc\" " Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422950 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422967 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9kfd\" (UniqueName: \"kubernetes.io/projected/7126bbaa-d9d7-41db-aafa-8b783cd992fb-kube-api-access-s9kfd\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422980 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.422995 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7126bbaa-d9d7-41db-aafa-8b783cd992fb-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.475409 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.475836 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099") on node "crc" Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.524809 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:39 crc kubenswrapper[4768]: W1203 16:42:39.583546 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded3f86e6_6517_4b4d_9d1f_d1b43d5ebead.slice/crio-e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d WatchSource:0}: Error finding container e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d: Status 404 returned error can't find the container with id e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.605112 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-b1ad-account-create-update-64z2j"] Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.766522 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-fff5-account-create-update-vdjxl"] Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.853214 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-xqmkm"] Dec 03 16:42:39 crc kubenswrapper[4768]: I1203 16:42:39.873085 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-6ef0-account-create-update-hxsjh"] Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.052852 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xqmkm" event={"ID":"45c7dcf6-60c1-4dce-b656-20da30e0414f","Type":"ContainerStarted","Data":"81be43a1a1e066ff2d08fddd956ba005ed22d6f0be834bd6a298430a94235d32"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.054689 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerStarted","Data":"739a328bd7a6500f77dd7b8c150e27dc1de274acbd9ac476bbf54649ad654a66"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.056084 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" event={"ID":"5d1a50ae-6bca-4c21-a19c-840c488991fe","Type":"ContainerStarted","Data":"7503f12891319164524229a9062dd3a522d62e5abdd63fe3a0c44862c4bba8ac"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.061934 4768 generic.go:334] "Generic (PLEG): container finished" podID="9d848bbe-f5d1-4661-b0fe-77acbc5de436" containerID="74eb5b94d690b42190ddb837fa8a487652471f1a44d693bb112106868d6f37a2" exitCode=0 Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.062057 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vhh2l" event={"ID":"9d848bbe-f5d1-4661-b0fe-77acbc5de436","Type":"ContainerDied","Data":"74eb5b94d690b42190ddb837fa8a487652471f1a44d693bb112106868d6f37a2"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.062096 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vhh2l" 
event={"ID":"9d848bbe-f5d1-4661-b0fe-77acbc5de436","Type":"ContainerStarted","Data":"d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.063907 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" event={"ID":"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead","Type":"ContainerStarted","Data":"26775996bd1017e47a5e02ab639819be4fe3d1b8d7bef66bcfe5fd60aa1c7361"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.063948 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" event={"ID":"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead","Type":"ContainerStarted","Data":"e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.066280 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-fff5-account-create-update-vdjxl" event={"ID":"e8db80e6-2ed4-434a-bb54-a0f7effe70b1","Type":"ContainerStarted","Data":"149bb6d54c017a189a18dd7ab754637e332ed0a23ced529b54522701e912e887"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.067811 4768 generic.go:334] "Generic (PLEG): container finished" podID="d76511f3-f6aa-4505-9721-68c83831c1d4" containerID="c65f9c4ef6606fcdaf6c173b6a0f891dc99b70d2ab6b316549b2e25791b5086a" exitCode=0 Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.067957 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b2p9v" event={"ID":"d76511f3-f6aa-4505-9721-68c83831c1d4","Type":"ContainerDied","Data":"c65f9c4ef6606fcdaf6c173b6a0f891dc99b70d2ab6b316549b2e25791b5086a"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.067982 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b2p9v" event={"ID":"d76511f3-f6aa-4505-9721-68c83831c1d4","Type":"ContainerStarted","Data":"0fad3a98ef8535fa03fffc9708576e45671cbcd4983cfca33d1822ad180e7aa3"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.069814 4768 generic.go:334] "Generic (PLEG): container finished" podID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerID="da654315bf738832b22d3d42e152e61599c50831fe17e9585f977103544abf52" exitCode=0 Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.069882 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.069918 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerDied","Data":"da654315bf738832b22d3d42e152e61599c50831fe17e9585f977103544abf52"} Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.116548 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" podStartSLOduration=2.116534401 podStartE2EDuration="2.116534401s" podCreationTimestamp="2025-12-03 16:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:40.106567544 +0000 UTC m=+1457.025903967" watchObservedRunningTime="2025-12-03 16:42:40.116534401 +0000 UTC m=+1457.035870824" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.197400 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.217232 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.232616 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:40 crc kubenswrapper[4768]: E1203 16:42:40.233094 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-httpd" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.233107 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-httpd" Dec 03 16:42:40 crc kubenswrapper[4768]: E1203 16:42:40.233121 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-log" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.233128 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-log" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.233329 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-log" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.233343 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" containerName="glance-httpd" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.234462 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.236392 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.236636 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.247328 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.352096 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357453 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357493 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-config-data\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357526 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkgfs\" (UniqueName: \"kubernetes.io/projected/1821ab39-b12d-4311-a67e-01840cf95a09-kube-api-access-lkgfs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357556 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357576 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-logs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357612 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357651 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.357719 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-scripts\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.459110 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.459191 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.459358 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460045 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460315 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460510 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460538 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbc7r\" (UniqueName: \"kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460579 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-logs\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460619 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data\") pod \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\" (UID: \"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a\") " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460901 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460938 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-logs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.460973 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461019 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461062 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-scripts\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461799 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461831 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-config-data\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461868 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkgfs\" (UniqueName: \"kubernetes.io/projected/1821ab39-b12d-4311-a67e-01840cf95a09-kube-api-access-lkgfs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.461968 4768 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.465224 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-logs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.465468 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1821ab39-b12d-4311-a67e-01840cf95a09-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.467732 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-logs" (OuterVolumeSpecName: "logs") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.480275 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.480401 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.482226 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r" (OuterVolumeSpecName: "kube-api-access-pbc7r") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "kube-api-access-pbc7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.486484 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.486529 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2d20f1f07bf733bd5c44955ae3cad3d4468693e76c899493d03847aaee02910e/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.487633 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkgfs\" (UniqueName: \"kubernetes.io/projected/1821ab39-b12d-4311-a67e-01840cf95a09-kube-api-access-lkgfs\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.488454 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-scripts\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.495136 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1821ab39-b12d-4311-a67e-01840cf95a09-config-data\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.499347 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts" (OuterVolumeSpecName: "scripts") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.544193 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dd3abf1d-6ee1-4eac-83f1-c88ebdc51099\") pod \"glance-default-external-api-0\" (UID: \"1821ab39-b12d-4311-a67e-01840cf95a09\") " pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.545315 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a" (OuterVolumeSpecName: "glance") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "pvc-45057964-d335-410f-a814-6d1d79c3091a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.549821 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.561250 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564415 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564443 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564454 4768 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564464 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564493 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") on node \"crc\" " Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.564503 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbc7r\" (UniqueName: \"kubernetes.io/projected/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-kube-api-access-pbc7r\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.570790 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data" (OuterVolumeSpecName: "config-data") pod "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" (UID: "8ffaa6a9-5225-4f3b-9f28-ba5567871e7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.587553 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.587869 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-45057964-d335-410f-a814-6d1d79c3091a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a") on node "crc" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.604127 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.667159 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:40 crc kubenswrapper[4768]: I1203 16:42:40.667195 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.095315 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xqmkm" event={"ID":"45c7dcf6-60c1-4dce-b656-20da30e0414f","Type":"ContainerStarted","Data":"5c8f67b79f1e6dbe203d4a7000ce360cfeb1bab82b13180d19dfad5f70e5d753"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.114472 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerStarted","Data":"982f4e1749f93b179fac3a1d4b98f83bead3fdb5582d34151aca86aac1079829"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.118166 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" event={"ID":"5d1a50ae-6bca-4c21-a19c-840c488991fe","Type":"ContainerStarted","Data":"71da2bdd8eea1e016ecec5d2b7394388807cb7a76a4c994a598283b866ce2983"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.129707 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-xqmkm" podStartSLOduration=3.129686784 podStartE2EDuration="3.129686784s" podCreationTimestamp="2025-12-03 16:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:41.120825091 +0000 UTC m=+1458.040161514" watchObservedRunningTime="2025-12-03 16:42:41.129686784 +0000 UTC m=+1458.049023237" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.133800 4768 generic.go:334] "Generic (PLEG): container finished" podID="ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" containerID="26775996bd1017e47a5e02ab639819be4fe3d1b8d7bef66bcfe5fd60aa1c7361" exitCode=0 Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.133873 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" event={"ID":"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead","Type":"ContainerDied","Data":"26775996bd1017e47a5e02ab639819be4fe3d1b8d7bef66bcfe5fd60aa1c7361"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.137858 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-fff5-account-create-update-vdjxl" event={"ID":"e8db80e6-2ed4-434a-bb54-a0f7effe70b1","Type":"ContainerStarted","Data":"1445b7c1337018071e566eb6dc26208284d56add0481f71ebdef381028ffbdcb"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.152491 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.152650 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"8ffaa6a9-5225-4f3b-9f28-ba5567871e7a","Type":"ContainerDied","Data":"f80c2f78555f430bcfd9951c690834ef488b98313c7833726915929e1838d440"} Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.152699 4768 scope.go:117] "RemoveContainer" containerID="da654315bf738832b22d3d42e152e61599c50831fe17e9585f977103544abf52" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.155694 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" podStartSLOduration=3.155672679 podStartE2EDuration="3.155672679s" podCreationTimestamp="2025-12-03 16:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:41.142738888 +0000 UTC m=+1458.062075321" watchObservedRunningTime="2025-12-03 16:42:41.155672679 +0000 UTC m=+1458.075009102" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.219741 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.237991 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-fff5-account-create-update-vdjxl" podStartSLOduration=3.23797093 podStartE2EDuration="3.23797093s" podCreationTimestamp="2025-12-03 16:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:41.171221158 +0000 UTC m=+1458.090557581" watchObservedRunningTime="2025-12-03 16:42:41.23797093 +0000 UTC m=+1458.157307353" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.305243 4768 scope.go:117] "RemoveContainer" containerID="0a91d13771fdc789efbacc4260fe138fd9fa7056af604a8a93b8afceaf9549b7" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.329459 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.341723 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.352693 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:41 crc kubenswrapper[4768]: E1203 16:42:41.353429 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-log" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.353448 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-log" Dec 03 16:42:41 crc kubenswrapper[4768]: E1203 16:42:41.353481 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-httpd" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.353487 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-httpd" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.353693 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-log" Dec 03 
16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.353722 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" containerName="glance-httpd" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.355229 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.363121 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.364076 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.364188 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.496201 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.496276 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8tjd\" (UniqueName: \"kubernetes.io/projected/eeb2f38b-2ae6-408e-815c-5bcd14d35623-kube-api-access-f8tjd\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.496322 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.496396 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.497147 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.497186 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.497234 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.497278 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.548242 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7126bbaa-d9d7-41db-aafa-8b783cd992fb" path="/var/lib/kubelet/pods/7126bbaa-d9d7-41db-aafa-8b783cd992fb/volumes" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.549099 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ffaa6a9-5225-4f3b-9f28-ba5567871e7a" path="/var/lib/kubelet/pods/8ffaa6a9-5225-4f3b-9f28-ba5567871e7a/volumes" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599455 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599511 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8tjd\" (UniqueName: \"kubernetes.io/projected/eeb2f38b-2ae6-408e-815c-5bcd14d35623-kube-api-access-f8tjd\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599550 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599585 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599742 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599772 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599818 4768 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.599862 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.600684 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-logs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.603439 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/eeb2f38b-2ae6-408e-815c-5bcd14d35623-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.610083 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-scripts\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.610983 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.612452 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.612541 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/47b06e618a5b6a9be0e4e66414c4e7ce54d0762cbbe4888f533ae97371202fb7/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.627496 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.629074 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8tjd\" (UniqueName: \"kubernetes.io/projected/eeb2f38b-2ae6-408e-815c-5bcd14d35623-kube-api-access-f8tjd\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.658269 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eeb2f38b-2ae6-408e-815c-5bcd14d35623-config-data\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.734140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-45057964-d335-410f-a814-6d1d79c3091a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-45057964-d335-410f-a814-6d1d79c3091a\") pod \"glance-default-internal-api-0\" (UID: \"eeb2f38b-2ae6-408e-815c-5bcd14d35623\") " pod="openstack/glance-default-internal-api-0" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.907211 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.923136 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:41 crc kubenswrapper[4768]: I1203 16:42:41.983730 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.011623 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts\") pod \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.011707 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7dvt\" (UniqueName: \"kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt\") pod \"d76511f3-f6aa-4505-9721-68c83831c1d4\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.011755 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts\") pod \"d76511f3-f6aa-4505-9721-68c83831c1d4\" (UID: \"d76511f3-f6aa-4505-9721-68c83831c1d4\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.011795 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct4wb\" (UniqueName: \"kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb\") pod \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\" (UID: \"9d848bbe-f5d1-4661-b0fe-77acbc5de436\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.012361 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9d848bbe-f5d1-4661-b0fe-77acbc5de436" (UID: "9d848bbe-f5d1-4661-b0fe-77acbc5de436"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.013300 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d76511f3-f6aa-4505-9721-68c83831c1d4" (UID: "d76511f3-f6aa-4505-9721-68c83831c1d4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.016209 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt" (OuterVolumeSpecName: "kube-api-access-b7dvt") pod "d76511f3-f6aa-4505-9721-68c83831c1d4" (UID: "d76511f3-f6aa-4505-9721-68c83831c1d4"). InnerVolumeSpecName "kube-api-access-b7dvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.018047 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb" (OuterVolumeSpecName: "kube-api-access-ct4wb") pod "9d848bbe-f5d1-4661-b0fe-77acbc5de436" (UID: "9d848bbe-f5d1-4661-b0fe-77acbc5de436"). InnerVolumeSpecName "kube-api-access-ct4wb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.124797 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d848bbe-f5d1-4661-b0fe-77acbc5de436-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.124831 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7dvt\" (UniqueName: \"kubernetes.io/projected/d76511f3-f6aa-4505-9721-68c83831c1d4-kube-api-access-b7dvt\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.124847 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d76511f3-f6aa-4505-9721-68c83831c1d4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.124860 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct4wb\" (UniqueName: \"kubernetes.io/projected/9d848bbe-f5d1-4661-b0fe-77acbc5de436-kube-api-access-ct4wb\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.166384 4768 generic.go:334] "Generic (PLEG): container finished" podID="45c7dcf6-60c1-4dce-b656-20da30e0414f" containerID="5c8f67b79f1e6dbe203d4a7000ce360cfeb1bab82b13180d19dfad5f70e5d753" exitCode=0 Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.166875 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xqmkm" event={"ID":"45c7dcf6-60c1-4dce-b656-20da30e0414f","Type":"ContainerDied","Data":"5c8f67b79f1e6dbe203d4a7000ce360cfeb1bab82b13180d19dfad5f70e5d753"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.170183 4768 generic.go:334] "Generic (PLEG): container finished" podID="5d1a50ae-6bca-4c21-a19c-840c488991fe" containerID="71da2bdd8eea1e016ecec5d2b7394388807cb7a76a4c994a598283b866ce2983" exitCode=0 Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.170232 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" event={"ID":"5d1a50ae-6bca-4c21-a19c-840c488991fe","Type":"ContainerDied","Data":"71da2bdd8eea1e016ecec5d2b7394388807cb7a76a4c994a598283b866ce2983"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.173835 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-vhh2l" event={"ID":"9d848bbe-f5d1-4661-b0fe-77acbc5de436","Type":"ContainerDied","Data":"d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.173857 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d825b9db231713b264114c6359dc5b19a4427d1f768292c4e4c75ec93c87621c" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.173898 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-vhh2l" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.188869 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1821ab39-b12d-4311-a67e-01840cf95a09","Type":"ContainerStarted","Data":"371342b2675098cddcc88976b04e0e735d0027e80dfdea6ebcdf624b33d8e72f"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.196956 4768 generic.go:334] "Generic (PLEG): container finished" podID="e8db80e6-2ed4-434a-bb54-a0f7effe70b1" containerID="1445b7c1337018071e566eb6dc26208284d56add0481f71ebdef381028ffbdcb" exitCode=0 Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.197017 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-fff5-account-create-update-vdjxl" event={"ID":"e8db80e6-2ed4-434a-bb54-a0f7effe70b1","Type":"ContainerDied","Data":"1445b7c1337018071e566eb6dc26208284d56add0481f71ebdef381028ffbdcb"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.199967 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-b2p9v" event={"ID":"d76511f3-f6aa-4505-9721-68c83831c1d4","Type":"ContainerDied","Data":"0fad3a98ef8535fa03fffc9708576e45671cbcd4983cfca33d1822ad180e7aa3"} Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.199988 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fad3a98ef8535fa03fffc9708576e45671cbcd4983cfca33d1822ad180e7aa3" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.200037 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-b2p9v" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.568654 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.597946 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.635512 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h94l5\" (UniqueName: \"kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5\") pod \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.635653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts\") pod \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\" (UID: \"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead\") " Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.636534 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" (UID: "ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.638357 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.640658 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5" (OuterVolumeSpecName: "kube-api-access-h94l5") pod "ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" (UID: "ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead"). InnerVolumeSpecName "kube-api-access-h94l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:42 crc kubenswrapper[4768]: I1203 16:42:42.740864 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h94l5\" (UniqueName: \"kubernetes.io/projected/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead-kube-api-access-h94l5\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.232913 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerStarted","Data":"65fd36ce0da7d114458e55c855bb0f3aaf2f3e10e9a02fc030f810f3c4a5810f"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.236177 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1821ab39-b12d-4311-a67e-01840cf95a09","Type":"ContainerStarted","Data":"4c57901fa897c75e195a92103b7cee1a33070ff7b9c739367a10d99a6267a731"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.236206 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"1821ab39-b12d-4311-a67e-01840cf95a09","Type":"ContainerStarted","Data":"f71be45958107f8012111f69f618ba612dfc8829ded8f4cc4eae5d4ddfd37b9c"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.242576 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" event={"ID":"ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead","Type":"ContainerDied","Data":"e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.242633 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6b6a7472110ac0367c8623b649409092a9b7af7bdcac9c2db8f3f83ec19a47d" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.242695 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-b1ad-account-create-update-64z2j" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.252851 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeb2f38b-2ae6-408e-815c-5bcd14d35623","Type":"ContainerStarted","Data":"bc8753d2ee665e40b77f507c4889cd99ff0180bcf5c7c941da00433e5fc77294"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.252899 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeb2f38b-2ae6-408e-815c-5bcd14d35623","Type":"ContainerStarted","Data":"174384cad887a1af762553ccb7e2a68e75144e524c765d2b498927f6bcd4eab4"} Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.287226 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.287204605 podStartE2EDuration="3.287204605s" podCreationTimestamp="2025-12-03 16:42:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:43.272506765 +0000 UTC m=+1460.191843238" watchObservedRunningTime="2025-12-03 16:42:43.287204605 +0000 UTC m=+1460.206541028" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.723899 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.886256 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts\") pod \"5d1a50ae-6bca-4c21-a19c-840c488991fe\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.886648 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cj2t\" (UniqueName: \"kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t\") pod \"5d1a50ae-6bca-4c21-a19c-840c488991fe\" (UID: \"5d1a50ae-6bca-4c21-a19c-840c488991fe\") " Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.889694 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5d1a50ae-6bca-4c21-a19c-840c488991fe" (UID: "5d1a50ae-6bca-4c21-a19c-840c488991fe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.893728 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t" (OuterVolumeSpecName: "kube-api-access-7cj2t") pod "5d1a50ae-6bca-4c21-a19c-840c488991fe" (UID: "5d1a50ae-6bca-4c21-a19c-840c488991fe"). InnerVolumeSpecName "kube-api-access-7cj2t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.918932 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.920067 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-58c4c66bd9-w8lwh" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.990213 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d1a50ae-6bca-4c21-a19c-840c488991fe-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.990240 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cj2t\" (UniqueName: \"kubernetes.io/projected/5d1a50ae-6bca-4c21-a19c-840c488991fe-kube-api-access-7cj2t\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:43 crc kubenswrapper[4768]: I1203 16:42:43.991377 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.006570 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.091301 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts\") pod \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.091621 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js2wc\" (UniqueName: \"kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc\") pod \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\" (UID: \"e8db80e6-2ed4-434a-bb54-a0f7effe70b1\") " Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.093904 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e8db80e6-2ed4-434a-bb54-a0f7effe70b1" (UID: "e8db80e6-2ed4-434a-bb54-a0f7effe70b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.099845 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc" (OuterVolumeSpecName: "kube-api-access-js2wc") pod "e8db80e6-2ed4-434a-bb54-a0f7effe70b1" (UID: "e8db80e6-2ed4-434a-bb54-a0f7effe70b1"). InnerVolumeSpecName "kube-api-access-js2wc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.193327 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts\") pod \"45c7dcf6-60c1-4dce-b656-20da30e0414f\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.193389 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p67zc\" (UniqueName: \"kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc\") pod \"45c7dcf6-60c1-4dce-b656-20da30e0414f\" (UID: \"45c7dcf6-60c1-4dce-b656-20da30e0414f\") " Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.194187 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.194214 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js2wc\" (UniqueName: \"kubernetes.io/projected/e8db80e6-2ed4-434a-bb54-a0f7effe70b1-kube-api-access-js2wc\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.204555 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc" (OuterVolumeSpecName: "kube-api-access-p67zc") pod "45c7dcf6-60c1-4dce-b656-20da30e0414f" (UID: "45c7dcf6-60c1-4dce-b656-20da30e0414f"). InnerVolumeSpecName "kube-api-access-p67zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.205445 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "45c7dcf6-60c1-4dce-b656-20da30e0414f" (UID: "45c7dcf6-60c1-4dce-b656-20da30e0414f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.271971 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"eeb2f38b-2ae6-408e-815c-5bcd14d35623","Type":"ContainerStarted","Data":"2f0266c0bac280b31e40093c7180b391fa6c874f62f3e68b141c2a3f4af36c13"} Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.276566 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-xqmkm" event={"ID":"45c7dcf6-60c1-4dce-b656-20da30e0414f","Type":"ContainerDied","Data":"81be43a1a1e066ff2d08fddd956ba005ed22d6f0be834bd6a298430a94235d32"} Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.276669 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="81be43a1a1e066ff2d08fddd956ba005ed22d6f0be834bd6a298430a94235d32" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.276714 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-xqmkm" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.283785 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerStarted","Data":"daadf6f9baf40a39c5d2baf871a6ab4674bfa8e191cc69184203af8f06b0ead5"} Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.284002 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-central-agent" containerID="cri-o://739a328bd7a6500f77dd7b8c150e27dc1de274acbd9ac476bbf54649ad654a66" gracePeriod=30 Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.284128 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.284180 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="proxy-httpd" containerID="cri-o://daadf6f9baf40a39c5d2baf871a6ab4674bfa8e191cc69184203af8f06b0ead5" gracePeriod=30 Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.284245 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="sg-core" containerID="cri-o://65fd36ce0da7d114458e55c855bb0f3aaf2f3e10e9a02fc030f810f3c4a5810f" gracePeriod=30 Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.284301 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-notification-agent" containerID="cri-o://982f4e1749f93b179fac3a1d4b98f83bead3fdb5582d34151aca86aac1079829" gracePeriod=30 Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.294250 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" event={"ID":"5d1a50ae-6bca-4c21-a19c-840c488991fe","Type":"ContainerDied","Data":"7503f12891319164524229a9062dd3a522d62e5abdd63fe3a0c44862c4bba8ac"} Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.294293 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7503f12891319164524229a9062dd3a522d62e5abdd63fe3a0c44862c4bba8ac" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.294363 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-6ef0-account-create-update-hxsjh" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.295691 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p67zc\" (UniqueName: \"kubernetes.io/projected/45c7dcf6-60c1-4dce-b656-20da30e0414f-kube-api-access-p67zc\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.295714 4768 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/45c7dcf6-60c1-4dce-b656-20da30e0414f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.305149 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-fff5-account-create-update-vdjxl" event={"ID":"e8db80e6-2ed4-434a-bb54-a0f7effe70b1","Type":"ContainerDied","Data":"149bb6d54c017a189a18dd7ab754637e332ed0a23ced529b54522701e912e887"} Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.305221 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="149bb6d54c017a189a18dd7ab754637e332ed0a23ced529b54522701e912e887" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.305296 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-fff5-account-create-update-vdjxl" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.307240 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.307212167 podStartE2EDuration="3.307212167s" podCreationTimestamp="2025-12-03 16:42:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:42:44.296263659 +0000 UTC m=+1461.215600072" watchObservedRunningTime="2025-12-03 16:42:44.307212167 +0000 UTC m=+1461.226548580" Dec 03 16:42:44 crc kubenswrapper[4768]: I1203 16:42:44.332050 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.873122317 podStartE2EDuration="7.332027777s" podCreationTimestamp="2025-12-03 16:42:37 +0000 UTC" firstStartedPulling="2025-12-03 16:42:37.945153457 +0000 UTC m=+1454.864489880" lastFinishedPulling="2025-12-03 16:42:43.404058907 +0000 UTC m=+1460.323395340" observedRunningTime="2025-12-03 16:42:44.322323656 +0000 UTC m=+1461.241660079" watchObservedRunningTime="2025-12-03 16:42:44.332027777 +0000 UTC m=+1461.251364200" Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.315902 4768 generic.go:334] "Generic (PLEG): container finished" podID="db72382d-b40a-49c4-93b1-4667366812bd" containerID="daadf6f9baf40a39c5d2baf871a6ab4674bfa8e191cc69184203af8f06b0ead5" exitCode=0 Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.316134 4768 generic.go:334] "Generic (PLEG): container finished" podID="db72382d-b40a-49c4-93b1-4667366812bd" containerID="65fd36ce0da7d114458e55c855bb0f3aaf2f3e10e9a02fc030f810f3c4a5810f" exitCode=2 Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.315983 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerDied","Data":"daadf6f9baf40a39c5d2baf871a6ab4674bfa8e191cc69184203af8f06b0ead5"} Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.316173 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerDied","Data":"65fd36ce0da7d114458e55c855bb0f3aaf2f3e10e9a02fc030f810f3c4a5810f"} Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.316188 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerDied","Data":"982f4e1749f93b179fac3a1d4b98f83bead3fdb5582d34151aca86aac1079829"} Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.316143 4768 generic.go:334] "Generic (PLEG): container finished" podID="db72382d-b40a-49c4-93b1-4667366812bd" containerID="982f4e1749f93b179fac3a1d4b98f83bead3fdb5582d34151aca86aac1079829" exitCode=0 Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.316210 4768 generic.go:334] "Generic (PLEG): container finished" podID="db72382d-b40a-49c4-93b1-4667366812bd" containerID="739a328bd7a6500f77dd7b8c150e27dc1de274acbd9ac476bbf54649ad654a66" exitCode=0 Dec 03 16:42:45 crc kubenswrapper[4768]: I1203 16:42:45.317076 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerDied","Data":"739a328bd7a6500f77dd7b8c150e27dc1de274acbd9ac476bbf54649ad654a66"} Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.082734 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251397 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251477 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251596 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251668 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99gsf\" (UniqueName: \"kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251727 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251798 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") " Dec 03 
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.251842 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts\") pod \"db72382d-b40a-49c4-93b1-4667366812bd\" (UID: \"db72382d-b40a-49c4-93b1-4667366812bd\") "
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.252117 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.252155 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.252704 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.252724 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db72382d-b40a-49c4-93b1-4667366812bd-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.261737 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts" (OuterVolumeSpecName: "scripts") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.261789 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf" (OuterVolumeSpecName: "kube-api-access-99gsf") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "kube-api-access-99gsf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.279342 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.330159 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"db72382d-b40a-49c4-93b1-4667366812bd","Type":"ContainerDied","Data":"8eee50067f6b6156523027886ece7636ca2a4e5b52620f5ad2ef53d646013246"} Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.330224 4768 scope.go:117] "RemoveContainer" containerID="daadf6f9baf40a39c5d2baf871a6ab4674bfa8e191cc69184203af8f06b0ead5" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.330395 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.354691 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99gsf\" (UniqueName: \"kubernetes.io/projected/db72382d-b40a-49c4-93b1-4667366812bd-kube-api-access-99gsf\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.354723 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.354731 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.354855 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.380683 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data" (OuterVolumeSpecName: "config-data") pod "db72382d-b40a-49c4-93b1-4667366812bd" (UID: "db72382d-b40a-49c4-93b1-4667366812bd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.439800 4768 scope.go:117] "RemoveContainer" containerID="65fd36ce0da7d114458e55c855bb0f3aaf2f3e10e9a02fc030f810f3c4a5810f" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.456227 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.456258 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db72382d-b40a-49c4-93b1-4667366812bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.471129 4768 scope.go:117] "RemoveContainer" containerID="982f4e1749f93b179fac3a1d4b98f83bead3fdb5582d34151aca86aac1079829" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.495514 4768 scope.go:117] "RemoveContainer" containerID="739a328bd7a6500f77dd7b8c150e27dc1de274acbd9ac476bbf54649ad654a66" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.675341 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.688878 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717281 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717691 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d76511f3-f6aa-4505-9721-68c83831c1d4" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717708 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="d76511f3-f6aa-4505-9721-68c83831c1d4" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717719 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="sg-core" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717725 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="sg-core" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717740 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717746 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717756 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d1a50ae-6bca-4c21-a19c-840c488991fe" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717762 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d1a50ae-6bca-4c21-a19c-840c488991fe" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717772 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8db80e6-2ed4-434a-bb54-a0f7effe70b1" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717779 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e8db80e6-2ed4-434a-bb54-a0f7effe70b1" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717801 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="proxy-httpd" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717816 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="proxy-httpd" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717829 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-notification-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717835 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-notification-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717855 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c7dcf6-60c1-4dce-b656-20da30e0414f" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717861 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c7dcf6-60c1-4dce-b656-20da30e0414f" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717871 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d848bbe-f5d1-4661-b0fe-77acbc5de436" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717877 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d848bbe-f5d1-4661-b0fe-77acbc5de436" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: E1203 16:42:46.717890 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-central-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.717895 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-central-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718067 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="proxy-httpd" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718078 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="45c7dcf6-60c1-4dce-b656-20da30e0414f" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718085 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8db80e6-2ed4-434a-bb54-a0f7effe70b1" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718097 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718103 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="d76511f3-f6aa-4505-9721-68c83831c1d4" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718125 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d848bbe-f5d1-4661-b0fe-77acbc5de436" containerName="mariadb-database-create" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718137 4768 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-notification-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718144 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d1a50ae-6bca-4c21-a19c-840c488991fe" containerName="mariadb-account-create-update" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718150 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="sg-core" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.718161 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="db72382d-b40a-49c4-93b1-4667366812bd" containerName="ceilometer-central-agent" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.719930 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.722620 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.729634 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.741964 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868310 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868389 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868416 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccd8l\" (UniqueName: \"kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868445 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868576 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868655 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd\") pod \"ceilometer-0\" (UID: 
\"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.868715 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.970833 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccd8l\" (UniqueName: \"kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.970892 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.970942 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.970979 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.971040 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.971074 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.971125 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.971633 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.971707 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd\") pod \"ceilometer-0\" (UID: 
\"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.975267 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.976785 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.980267 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.980520 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:46 crc kubenswrapper[4768]: I1203 16:42:46.993364 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccd8l\" (UniqueName: \"kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l\") pod \"ceilometer-0\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " pod="openstack/ceilometer-0" Dec 03 16:42:47 crc kubenswrapper[4768]: I1203 16:42:47.042663 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:42:47 crc kubenswrapper[4768]: I1203 16:42:47.559433 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db72382d-b40a-49c4-93b1-4667366812bd" path="/var/lib/kubelet/pods/db72382d-b40a-49c4-93b1-4667366812bd/volumes" Dec 03 16:42:47 crc kubenswrapper[4768]: I1203 16:42:47.597300 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:42:47 crc kubenswrapper[4768]: W1203 16:42:47.603790 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32afaa44_9282_4409_a10f_ee4b1764ff5d.slice/crio-919f2b3817948274bcec10225cad69c8834e9d081361d9138b1443316a167492 WatchSource:0}: Error finding container 919f2b3817948274bcec10225cad69c8834e9d081361d9138b1443316a167492: Status 404 returned error can't find the container with id 919f2b3817948274bcec10225cad69c8834e9d081361d9138b1443316a167492 Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.358047 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerStarted","Data":"919f2b3817948274bcec10225cad69c8834e9d081361d9138b1443316a167492"} Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.784175 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6x6hp"] Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.788035 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.803073 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.803194 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.803347 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-k4fsr" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.812332 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6x6hp"] Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.940532 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.940922 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.940984 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:48 crc kubenswrapper[4768]: I1203 16:42:48.941042 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7jxz\" (UniqueName: \"kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.043234 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7jxz\" (UniqueName: \"kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.043424 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.043533 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data\") pod \"nova-cell0-conductor-db-sync-6x6hp\" 
(UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.044881 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.050411 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.050954 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.051331 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.065282 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7jxz\" (UniqueName: \"kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz\") pod \"nova-cell0-conductor-db-sync-6x6hp\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.260723 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.385656 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerStarted","Data":"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c"} Dec 03 16:42:49 crc kubenswrapper[4768]: I1203 16:42:49.751079 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6x6hp"] Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.398280 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" event={"ID":"cfb313e1-4e5d-40b8-a882-82239fe7ae17","Type":"ContainerStarted","Data":"1711967dea3681a14f915810090cff66141d4fc11442a498db665100753b2717"} Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.403718 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerStarted","Data":"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916"} Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.605249 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.605297 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.656813 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:42:50 crc kubenswrapper[4768]: I1203 16:42:50.698273 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 03 16:42:51 crc kubenswrapper[4768]: I1203 16:42:51.419280 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerStarted","Data":"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922"} Dec 03 16:42:51 crc kubenswrapper[4768]: I1203 16:42:51.419326 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:42:51 crc kubenswrapper[4768]: I1203 16:42:51.419480 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 03 16:42:51 crc kubenswrapper[4768]: I1203 16:42:51.985616 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:51 crc kubenswrapper[4768]: I1203 16:42:51.985904 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:52 crc kubenswrapper[4768]: I1203 16:42:52.049669 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:52 crc kubenswrapper[4768]: I1203 16:42:52.082066 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:52 crc kubenswrapper[4768]: I1203 16:42:52.427220 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:52 crc kubenswrapper[4768]: I1203 16:42:52.427274 4768 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:53 crc kubenswrapper[4768]: I1203 16:42:53.434389 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:42:53 crc kubenswrapper[4768]: I1203 16:42:53.434805 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.482714 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerStarted","Data":"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb"} Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.483813 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.509438 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.046385481 podStartE2EDuration="10.50942265s" podCreationTimestamp="2025-12-03 16:42:46 +0000 UTC" firstStartedPulling="2025-12-03 16:42:47.606226924 +0000 UTC m=+1464.525563347" lastFinishedPulling="2025-12-03 16:42:56.069264093 +0000 UTC m=+1472.988600516" observedRunningTime="2025-12-03 16:42:56.504103834 +0000 UTC m=+1473.423440267" watchObservedRunningTime="2025-12-03 16:42:56.50942265 +0000 UTC m=+1473.428759073" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.525307 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.525417 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.676928 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.677078 4768 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.686182 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 03 16:42:56 crc kubenswrapper[4768]: I1203 16:42:56.716523 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 03 16:42:57 crc kubenswrapper[4768]: I1203 16:42:57.141808 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.188:8889/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:42:57 crc kubenswrapper[4768]: I1203 16:42:57.141835 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/cloudkitty-api-0" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.188:8889/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.724499 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.725284 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" 
podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-central-agent" containerID="cri-o://1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c" gracePeriod=30 Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.725750 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="proxy-httpd" containerID="cri-o://c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb" gracePeriod=30 Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.725791 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="sg-core" containerID="cri-o://565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922" gracePeriod=30 Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.725822 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-notification-agent" containerID="cri-o://8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916" gracePeriod=30 Dec 03 16:43:01 crc kubenswrapper[4768]: I1203 16:43:01.844276 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 03 16:43:02 crc kubenswrapper[4768]: I1203 16:43:02.552106 4768 generic.go:334] "Generic (PLEG): container finished" podID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerID="c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb" exitCode=0 Dec 03 16:43:02 crc kubenswrapper[4768]: I1203 16:43:02.552420 4768 generic.go:334] "Generic (PLEG): container finished" podID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerID="565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922" exitCode=2 Dec 03 16:43:02 crc kubenswrapper[4768]: I1203 16:43:02.552372 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerDied","Data":"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb"} Dec 03 16:43:02 crc kubenswrapper[4768]: I1203 16:43:02.552458 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerDied","Data":"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922"} Dec 03 16:43:03 crc kubenswrapper[4768]: I1203 16:43:03.570843 4768 generic.go:334] "Generic (PLEG): container finished" podID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerID="8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916" exitCode=0 Dec 03 16:43:03 crc kubenswrapper[4768]: I1203 16:43:03.570885 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerDied","Data":"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916"} Dec 03 16:43:04 crc kubenswrapper[4768]: I1203 16:43:04.580978 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" event={"ID":"cfb313e1-4e5d-40b8-a882-82239fe7ae17","Type":"ContainerStarted","Data":"394e144f1f9efa209bd84a5a938002f2ce150e3b1ba69d23cdadbb701d1f28d8"} Dec 03 16:43:04 crc kubenswrapper[4768]: I1203 16:43:04.603327 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-conductor-db-sync-6x6hp" podStartSLOduration=2.422065729 podStartE2EDuration="16.603310786s" podCreationTimestamp="2025-12-03 16:42:48 +0000 UTC" firstStartedPulling="2025-12-03 16:42:49.751520648 +0000 UTC m=+1466.670857071" lastFinishedPulling="2025-12-03 16:43:03.932765705 +0000 UTC m=+1480.852102128" observedRunningTime="2025-12-03 16:43:04.59624558 +0000 UTC m=+1481.515582003" watchObservedRunningTime="2025-12-03 16:43:04.603310786 +0000 UTC m=+1481.522647209" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.442422 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.612661 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.613127 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.613269 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.613500 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.613841 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.613982 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.614045 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccd8l\" (UniqueName: \"kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.614480 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.614533 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle\") pod \"32afaa44-9282-4409-a10f-ee4b1764ff5d\" (UID: \"32afaa44-9282-4409-a10f-ee4b1764ff5d\") " Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.615272 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.615297 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32afaa44-9282-4409-a10f-ee4b1764ff5d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.620010 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts" (OuterVolumeSpecName: "scripts") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.620030 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l" (OuterVolumeSpecName: "kube-api-access-ccd8l") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "kube-api-access-ccd8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.659147 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.670194 4768 generic.go:334] "Generic (PLEG): container finished" podID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerID="1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c" exitCode=0 Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.670274 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerDied","Data":"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c"} Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.670311 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"32afaa44-9282-4409-a10f-ee4b1764ff5d","Type":"ContainerDied","Data":"919f2b3817948274bcec10225cad69c8834e9d081361d9138b1443316a167492"} Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.670358 4768 scope.go:117] "RemoveContainer" containerID="c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.670574 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.695661 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.716472 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.716510 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.716523 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccd8l\" (UniqueName: \"kubernetes.io/projected/32afaa44-9282-4409-a10f-ee4b1764ff5d-kube-api-access-ccd8l\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.716536 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.737935 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data" (OuterVolumeSpecName: "config-data") pod "32afaa44-9282-4409-a10f-ee4b1764ff5d" (UID: "32afaa44-9282-4409-a10f-ee4b1764ff5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.741327 4768 scope.go:117] "RemoveContainer" containerID="565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.760903 4768 scope.go:117] "RemoveContainer" containerID="8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.803939 4768 scope.go:117] "RemoveContainer" containerID="1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.818201 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32afaa44-9282-4409-a10f-ee4b1764ff5d-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.833490 4768 scope.go:117] "RemoveContainer" containerID="c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb" Dec 03 16:43:11 crc kubenswrapper[4768]: E1203 16:43:11.834214 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb\": container with ID starting with c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb not found: ID does not exist" containerID="c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.834258 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb"} err="failed to get container status \"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb\": rpc error: code = NotFound desc = could not find container \"c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb\": container with ID starting with c034f437e2b4d000fbff224680de4036752982c9d3ba55b9c03a23a094583fbb not found: ID does not exist" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.834300 4768 scope.go:117] "RemoveContainer" containerID="565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922" Dec 03 16:43:11 crc kubenswrapper[4768]: E1203 16:43:11.834895 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922\": container with ID starting with 565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922 not found: ID does not exist" containerID="565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.834935 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922"} err="failed to get container status \"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922\": rpc error: code = NotFound desc = could not find container \"565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922\": container with ID starting with 565f6dcc4a27e393f716299d149e10a1caee14abad6b2493a49ba79f8a71e922 not found: ID does not exist" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.834966 4768 scope.go:117] "RemoveContainer" containerID="8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916" Dec 03 16:43:11 crc kubenswrapper[4768]: E1203 
16:43:11.835268 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916\": container with ID starting with 8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916 not found: ID does not exist" containerID="8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.835288 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916"} err="failed to get container status \"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916\": rpc error: code = NotFound desc = could not find container \"8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916\": container with ID starting with 8978e7d3b043f8fe4a43bcff9b828e980859d4499a85ff1adef3f9e10bdd3916 not found: ID does not exist" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.835303 4768 scope.go:117] "RemoveContainer" containerID="1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c" Dec 03 16:43:11 crc kubenswrapper[4768]: E1203 16:43:11.835547 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c\": container with ID starting with 1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c not found: ID does not exist" containerID="1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c" Dec 03 16:43:11 crc kubenswrapper[4768]: I1203 16:43:11.835564 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c"} err="failed to get container status \"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c\": rpc error: code = NotFound desc = could not find container \"1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c\": container with ID starting with 1b5498913d74392d988efa19c9f2d6b64c6b57c52cd28526c78004301a7de44c not found: ID does not exist" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.007084 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.017105 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.044890 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:12 crc kubenswrapper[4768]: E1203 16:43:12.045389 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-central-agent" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045409 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-central-agent" Dec 03 16:43:12 crc kubenswrapper[4768]: E1203 16:43:12.045429 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-notification-agent" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045438 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-notification-agent" Dec 03 16:43:12 crc 
kubenswrapper[4768]: E1203 16:43:12.045473 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="proxy-httpd" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045482 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="proxy-httpd" Dec 03 16:43:12 crc kubenswrapper[4768]: E1203 16:43:12.045501 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="sg-core" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045508 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="sg-core" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045780 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-central-agent" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045808 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="ceilometer-notification-agent" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.045827 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="proxy-httpd" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.056716 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" containerName="sg-core" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.059089 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.062477 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.068567 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.068876 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.224924 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd9tx\" (UniqueName: \"kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225051 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225069 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225660 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225694 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225720 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.225791 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328251 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328331 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd9tx\" (UniqueName: \"kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328510 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328531 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328628 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328679 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.328797 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.329802 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.334303 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.334467 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.338581 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.346323 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.347217 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd9tx\" (UniqueName: \"kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx\") pod \"ceilometer-0\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.425945 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:12 crc kubenswrapper[4768]: I1203 16:43:12.936462 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:13 crc kubenswrapper[4768]: I1203 16:43:13.550731 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32afaa44-9282-4409-a10f-ee4b1764ff5d" path="/var/lib/kubelet/pods/32afaa44-9282-4409-a10f-ee4b1764ff5d/volumes" Dec 03 16:43:13 crc kubenswrapper[4768]: I1203 16:43:13.704847 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerStarted","Data":"55145c6d6960c59508e0b11c16af4546852c5670e43d0dc9f22c6c79e7172574"} Dec 03 16:43:15 crc kubenswrapper[4768]: I1203 16:43:15.735406 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerStarted","Data":"690c7c10b95e3aa90ae02067e4fdde3408629187645cefea233ca95f18643612"} Dec 03 16:43:16 crc kubenswrapper[4768]: I1203 16:43:16.756181 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerStarted","Data":"c491eddc4ccbd565c6bc80babd20954f63e2a758be25485f311f5ced96753b7f"} Dec 03 16:43:18 crc kubenswrapper[4768]: I1203 16:43:18.653725 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:18 crc kubenswrapper[4768]: I1203 16:43:18.679811 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:18 crc kubenswrapper[4768]: I1203 16:43:18.699514 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:19 crc kubenswrapper[4768]: I1203 16:43:19.926170 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:19 crc kubenswrapper[4768]: I1203 16:43:19.926324 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:19 crc kubenswrapper[4768]: I1203 16:43:19.926362 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sj9kv\" (UniqueName: \"kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.032688 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 
16:43:20.032827 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.032864 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sj9kv\" (UniqueName: \"kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.033258 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.033446 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.062425 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sj9kv\" (UniqueName: \"kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv\") pod \"redhat-operators-cxp8w\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.215197 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:20 crc kubenswrapper[4768]: W1203 16:43:20.727936 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1648c868_22a9_4e98_9d15_b8bc2ca021d6.slice/crio-10dfd760109caff296e1e00c54c42c6adf50fa6b2909281d4639eaab338bfcff WatchSource:0}: Error finding container 10dfd760109caff296e1e00c54c42c6adf50fa6b2909281d4639eaab338bfcff: Status 404 returned error can't find the container with id 10dfd760109caff296e1e00c54c42c6adf50fa6b2909281d4639eaab338bfcff Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.744855 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:20 crc kubenswrapper[4768]: I1203 16:43:20.989340 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerStarted","Data":"10dfd760109caff296e1e00c54c42c6adf50fa6b2909281d4639eaab338bfcff"} Dec 03 16:43:22 crc kubenswrapper[4768]: I1203 16:43:22.005544 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerStarted","Data":"8b2535213f7524cbca33d47d79aaa2661551613a623e7033bc9135285892df68"} Dec 03 16:43:22 crc kubenswrapper[4768]: I1203 16:43:22.011050 4768 generic.go:334] "Generic (PLEG): container finished" podID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerID="092d7a65b7979f42887b6a0676cfe2827b9accc43d588160afa80d3acbca439a" exitCode=0 Dec 03 16:43:22 crc kubenswrapper[4768]: I1203 16:43:22.011132 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerDied","Data":"092d7a65b7979f42887b6a0676cfe2827b9accc43d588160afa80d3acbca439a"} Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 16:43:26.029282 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 16:43:26.029748 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 16:43:26.073560 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerStarted","Data":"da969ac16bc8eef58e0885a1655cbf6a8bde9d87f5cca28344406cf4fa2816a2"} Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 16:43:26.073766 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 16:43:26.079158 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerStarted","Data":"c4705eb87ca832f35ed55991704524b1b6e09dd61b942da4ac3d207cc5cfdac5"} Dec 03 16:43:26 crc kubenswrapper[4768]: I1203 
16:43:26.106903 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.798027561 podStartE2EDuration="14.106881351s" podCreationTimestamp="2025-12-03 16:43:12 +0000 UTC" firstStartedPulling="2025-12-03 16:43:12.94104585 +0000 UTC m=+1489.860382263" lastFinishedPulling="2025-12-03 16:43:25.24989959 +0000 UTC m=+1502.169236053" observedRunningTime="2025-12-03 16:43:26.094804901 +0000 UTC m=+1503.014141334" watchObservedRunningTime="2025-12-03 16:43:26.106881351 +0000 UTC m=+1503.026217774" Dec 03 16:43:27 crc kubenswrapper[4768]: I1203 16:43:27.864317 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:28 crc kubenswrapper[4768]: I1203 16:43:28.099749 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-central-agent" containerID="cri-o://690c7c10b95e3aa90ae02067e4fdde3408629187645cefea233ca95f18643612" gracePeriod=30 Dec 03 16:43:28 crc kubenswrapper[4768]: I1203 16:43:28.100462 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="proxy-httpd" containerID="cri-o://da969ac16bc8eef58e0885a1655cbf6a8bde9d87f5cca28344406cf4fa2816a2" gracePeriod=30 Dec 03 16:43:28 crc kubenswrapper[4768]: I1203 16:43:28.100522 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="sg-core" containerID="cri-o://8b2535213f7524cbca33d47d79aaa2661551613a623e7033bc9135285892df68" gracePeriod=30 Dec 03 16:43:28 crc kubenswrapper[4768]: I1203 16:43:28.100558 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-notification-agent" containerID="cri-o://c491eddc4ccbd565c6bc80babd20954f63e2a758be25485f311f5ced96753b7f" gracePeriod=30 Dec 03 16:43:29 crc kubenswrapper[4768]: I1203 16:43:29.110773 4768 generic.go:334] "Generic (PLEG): container finished" podID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerID="da969ac16bc8eef58e0885a1655cbf6a8bde9d87f5cca28344406cf4fa2816a2" exitCode=0 Dec 03 16:43:29 crc kubenswrapper[4768]: I1203 16:43:29.111047 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerDied","Data":"da969ac16bc8eef58e0885a1655cbf6a8bde9d87f5cca28344406cf4fa2816a2"} Dec 03 16:43:30 crc kubenswrapper[4768]: I1203 16:43:30.132198 4768 generic.go:334] "Generic (PLEG): container finished" podID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerID="8b2535213f7524cbca33d47d79aaa2661551613a623e7033bc9135285892df68" exitCode=2 Dec 03 16:43:30 crc kubenswrapper[4768]: I1203 16:43:30.132397 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerDied","Data":"8b2535213f7524cbca33d47d79aaa2661551613a623e7033bc9135285892df68"} Dec 03 16:43:31 crc kubenswrapper[4768]: I1203 16:43:31.149202 4768 generic.go:334] "Generic (PLEG): container finished" podID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerID="c4705eb87ca832f35ed55991704524b1b6e09dd61b942da4ac3d207cc5cfdac5" exitCode=0 Dec 03 16:43:31 crc kubenswrapper[4768]: I1203 16:43:31.149306 4768 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerDied","Data":"c4705eb87ca832f35ed55991704524b1b6e09dd61b942da4ac3d207cc5cfdac5"} Dec 03 16:43:31 crc kubenswrapper[4768]: I1203 16:43:31.153408 4768 generic.go:334] "Generic (PLEG): container finished" podID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerID="c491eddc4ccbd565c6bc80babd20954f63e2a758be25485f311f5ced96753b7f" exitCode=0 Dec 03 16:43:31 crc kubenswrapper[4768]: I1203 16:43:31.153439 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerDied","Data":"c491eddc4ccbd565c6bc80babd20954f63e2a758be25485f311f5ced96753b7f"} Dec 03 16:43:35 crc kubenswrapper[4768]: I1203 16:43:35.216746 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerStarted","Data":"dce853ecadf32ba119cb72021a17c113c24e8ee47935a591a72a85a7f22fd31c"} Dec 03 16:43:35 crc kubenswrapper[4768]: I1203 16:43:35.242413 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cxp8w" podStartSLOduration=5.004793588 podStartE2EDuration="17.24239469s" podCreationTimestamp="2025-12-03 16:43:18 +0000 UTC" firstStartedPulling="2025-12-03 16:43:22.013371557 +0000 UTC m=+1498.932707980" lastFinishedPulling="2025-12-03 16:43:34.250972659 +0000 UTC m=+1511.170309082" observedRunningTime="2025-12-03 16:43:35.235249446 +0000 UTC m=+1512.154585869" watchObservedRunningTime="2025-12-03 16:43:35.24239469 +0000 UTC m=+1512.161731113" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.239218 4768 generic.go:334] "Generic (PLEG): container finished" podID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerID="690c7c10b95e3aa90ae02067e4fdde3408629187645cefea233ca95f18643612" exitCode=0 Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.239779 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerDied","Data":"690c7c10b95e3aa90ae02067e4fdde3408629187645cefea233ca95f18643612"} Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.583849 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.670019 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.670574 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.670818 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.671811 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd9tx\" (UniqueName: \"kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.671875 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.671963 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.672001 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.672055 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts\") pod \"15a8316a-3647-4a14-b046-c74d652eb5fd\" (UID: \"15a8316a-3647-4a14-b046-c74d652eb5fd\") " Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.677260 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.677333 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx" (OuterVolumeSpecName: "kube-api-access-qd9tx") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "kube-api-access-qd9tx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.677589 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.677624 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd9tx\" (UniqueName: \"kubernetes.io/projected/15a8316a-3647-4a14-b046-c74d652eb5fd-kube-api-access-qd9tx\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.677635 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/15a8316a-3647-4a14-b046-c74d652eb5fd-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.702560 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts" (OuterVolumeSpecName: "scripts") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.736853 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.780075 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.780119 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.798165 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data" (OuterVolumeSpecName: "config-data") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.816976 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15a8316a-3647-4a14-b046-c74d652eb5fd" (UID: "15a8316a-3647-4a14-b046-c74d652eb5fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.881896 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:37 crc kubenswrapper[4768]: I1203 16:43:37.881926 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15a8316a-3647-4a14-b046-c74d652eb5fd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.261134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"15a8316a-3647-4a14-b046-c74d652eb5fd","Type":"ContainerDied","Data":"55145c6d6960c59508e0b11c16af4546852c5670e43d0dc9f22c6c79e7172574"} Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.261195 4768 scope.go:117] "RemoveContainer" containerID="da969ac16bc8eef58e0885a1655cbf6a8bde9d87f5cca28344406cf4fa2816a2" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.261206 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.288123 4768 scope.go:117] "RemoveContainer" containerID="8b2535213f7524cbca33d47d79aaa2661551613a623e7033bc9135285892df68" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.314217 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.334642 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352031 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:38 crc kubenswrapper[4768]: E1203 16:43:38.352663 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="proxy-httpd" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352681 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="proxy-httpd" Dec 03 16:43:38 crc kubenswrapper[4768]: E1203 16:43:38.352705 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-central-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352713 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-central-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: E1203 16:43:38.352726 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-notification-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352732 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-notification-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: E1203 16:43:38.352738 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="sg-core" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352744 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="sg-core" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352932 4768 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="proxy-httpd" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352944 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-central-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352953 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="sg-core" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.352967 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" containerName="ceilometer-notification-agent" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.354749 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.357335 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.357554 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.363068 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.365662 4768 scope.go:117] "RemoveContainer" containerID="c491eddc4ccbd565c6bc80babd20954f63e2a758be25485f311f5ced96753b7f" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.407193 4768 scope.go:117] "RemoveContainer" containerID="690c7c10b95e3aa90ae02067e4fdde3408629187645cefea233ca95f18643612" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.508810 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-822hj\" (UniqueName: \"kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509018 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509125 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509266 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509309 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data\") pod \"ceilometer-0\" (UID: 
\"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509411 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.509460 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: E1203 16:43:38.530235 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15a8316a_3647_4a14_b046_c74d652eb5fd.slice/crio-55145c6d6960c59508e0b11c16af4546852c5670e43d0dc9f22c6c79e7172574\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15a8316a_3647_4a14_b046_c74d652eb5fd.slice\": RecentStats: unable to find data in memory cache]" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.611664 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.611718 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.611885 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.611920 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.612043 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-822hj\" (UniqueName: \"kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.612151 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 
16:43:38.612245 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.612655 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.613056 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.617720 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.626607 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.627210 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.627895 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.629752 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-822hj\" (UniqueName: \"kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj\") pod \"ceilometer-0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " pod="openstack/ceilometer-0" Dec 03 16:43:38 crc kubenswrapper[4768]: I1203 16:43:38.679975 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:43:39 crc kubenswrapper[4768]: I1203 16:43:39.135090 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:43:39 crc kubenswrapper[4768]: I1203 16:43:39.274030 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerStarted","Data":"8a90119d9f04263292efe3dc21df950b5c2aece9cd0e42df358bc9acae744f9c"} Dec 03 16:43:39 crc kubenswrapper[4768]: I1203 16:43:39.543826 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15a8316a-3647-4a14-b046-c74d652eb5fd" path="/var/lib/kubelet/pods/15a8316a-3647-4a14-b046-c74d652eb5fd/volumes" Dec 03 16:43:40 crc kubenswrapper[4768]: I1203 16:43:40.216758 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:40 crc kubenswrapper[4768]: I1203 16:43:40.216829 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:40 crc kubenswrapper[4768]: I1203 16:43:40.306692 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:40 crc kubenswrapper[4768]: I1203 16:43:40.374962 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:40 crc kubenswrapper[4768]: I1203 16:43:40.557837 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:41 crc kubenswrapper[4768]: I1203 16:43:41.295706 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerStarted","Data":"990d81437d8ce78ccc501e312e8b92bd1da96095e288c63eadd5b3200a2cbc9d"} Dec 03 16:43:42 crc kubenswrapper[4768]: I1203 16:43:42.308856 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cxp8w" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="registry-server" containerID="cri-o://dce853ecadf32ba119cb72021a17c113c24e8ee47935a591a72a85a7f22fd31c" gracePeriod=2 Dec 03 16:43:43 crc kubenswrapper[4768]: I1203 16:43:43.321746 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerStarted","Data":"6da18d4b0ed0b271cbce252133b158713108e817aecddd6a1d095e0c1dac9cec"} Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.350452 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerStarted","Data":"dd1046965dad7d0bf31a456c9e7fb8ddad2c9582ded828c3f460d06b8fa38ce3"} Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.353629 4768 generic.go:334] "Generic (PLEG): container finished" podID="cfb313e1-4e5d-40b8-a882-82239fe7ae17" containerID="394e144f1f9efa209bd84a5a938002f2ce150e3b1ba69d23cdadbb701d1f28d8" exitCode=0 Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.353745 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" event={"ID":"cfb313e1-4e5d-40b8-a882-82239fe7ae17","Type":"ContainerDied","Data":"394e144f1f9efa209bd84a5a938002f2ce150e3b1ba69d23cdadbb701d1f28d8"} Dec 03 16:43:45 crc kubenswrapper[4768]: 
I1203 16:43:45.369671 4768 generic.go:334] "Generic (PLEG): container finished" podID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerID="dce853ecadf32ba119cb72021a17c113c24e8ee47935a591a72a85a7f22fd31c" exitCode=0 Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.369730 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerDied","Data":"dce853ecadf32ba119cb72021a17c113c24e8ee47935a591a72a85a7f22fd31c"} Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.524767 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.713891 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sj9kv\" (UniqueName: \"kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv\") pod \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.714041 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content\") pod \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.714111 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities\") pod \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\" (UID: \"1648c868-22a9-4e98-9d15-b8bc2ca021d6\") " Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.715417 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities" (OuterVolumeSpecName: "utilities") pod "1648c868-22a9-4e98-9d15-b8bc2ca021d6" (UID: "1648c868-22a9-4e98-9d15-b8bc2ca021d6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.715924 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.730719 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv" (OuterVolumeSpecName: "kube-api-access-sj9kv") pod "1648c868-22a9-4e98-9d15-b8bc2ca021d6" (UID: "1648c868-22a9-4e98-9d15-b8bc2ca021d6"). InnerVolumeSpecName "kube-api-access-sj9kv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.818131 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sj9kv\" (UniqueName: \"kubernetes.io/projected/1648c868-22a9-4e98-9d15-b8bc2ca021d6-kube-api-access-sj9kv\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.850737 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1648c868-22a9-4e98-9d15-b8bc2ca021d6" (UID: "1648c868-22a9-4e98-9d15-b8bc2ca021d6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:43:45 crc kubenswrapper[4768]: I1203 16:43:45.920632 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1648c868-22a9-4e98-9d15-b8bc2ca021d6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.384031 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cxp8w" event={"ID":"1648c868-22a9-4e98-9d15-b8bc2ca021d6","Type":"ContainerDied","Data":"10dfd760109caff296e1e00c54c42c6adf50fa6b2909281d4639eaab338bfcff"} Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.384071 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cxp8w" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.384509 4768 scope.go:117] "RemoveContainer" containerID="dce853ecadf32ba119cb72021a17c113c24e8ee47935a591a72a85a7f22fd31c" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.415093 4768 scope.go:117] "RemoveContainer" containerID="c4705eb87ca832f35ed55991704524b1b6e09dd61b942da4ac3d207cc5cfdac5" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.455038 4768 scope.go:117] "RemoveContainer" containerID="092d7a65b7979f42887b6a0676cfe2827b9accc43d588160afa80d3acbca439a" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.467490 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.480908 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cxp8w"] Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.732325 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.847810 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle\") pod \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.848263 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data\") pod \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.848293 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts\") pod \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.848502 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7jxz\" (UniqueName: \"kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz\") pod \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\" (UID: \"cfb313e1-4e5d-40b8-a882-82239fe7ae17\") " Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.854560 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts" (OuterVolumeSpecName: "scripts") pod "cfb313e1-4e5d-40b8-a882-82239fe7ae17" (UID: "cfb313e1-4e5d-40b8-a882-82239fe7ae17"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.854807 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz" (OuterVolumeSpecName: "kube-api-access-l7jxz") pod "cfb313e1-4e5d-40b8-a882-82239fe7ae17" (UID: "cfb313e1-4e5d-40b8-a882-82239fe7ae17"). InnerVolumeSpecName "kube-api-access-l7jxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.885022 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data" (OuterVolumeSpecName: "config-data") pod "cfb313e1-4e5d-40b8-a882-82239fe7ae17" (UID: "cfb313e1-4e5d-40b8-a882-82239fe7ae17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.889448 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfb313e1-4e5d-40b8-a882-82239fe7ae17" (UID: "cfb313e1-4e5d-40b8-a882-82239fe7ae17"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.950772 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.950823 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.950842 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7jxz\" (UniqueName: \"kubernetes.io/projected/cfb313e1-4e5d-40b8-a882-82239fe7ae17-kube-api-access-l7jxz\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:46 crc kubenswrapper[4768]: I1203 16:43:46.950860 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb313e1-4e5d-40b8-a882-82239fe7ae17-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.409019 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerStarted","Data":"4fcf75c76a3680c219fd7a49c047994c3d54d4cddee095e15187fd0251b4d0a9"} Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.409129 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.413256 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" event={"ID":"cfb313e1-4e5d-40b8-a882-82239fe7ae17","Type":"ContainerDied","Data":"1711967dea3681a14f915810090cff66141d4fc11442a498db665100753b2717"} Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.413297 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1711967dea3681a14f915810090cff66141d4fc11442a498db665100753b2717" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.413271 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6x6hp" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.449587 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.426704386 podStartE2EDuration="9.449559842s" podCreationTimestamp="2025-12-03 16:43:38 +0000 UTC" firstStartedPulling="2025-12-03 16:43:39.133503401 +0000 UTC m=+1516.052839834" lastFinishedPulling="2025-12-03 16:43:46.156358857 +0000 UTC m=+1523.075695290" observedRunningTime="2025-12-03 16:43:47.432952136 +0000 UTC m=+1524.352288569" watchObservedRunningTime="2025-12-03 16:43:47.449559842 +0000 UTC m=+1524.368896265" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.496408 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:43:47 crc kubenswrapper[4768]: E1203 16:43:47.499420 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb313e1-4e5d-40b8-a882-82239fe7ae17" containerName="nova-cell0-conductor-db-sync" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.499450 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb313e1-4e5d-40b8-a882-82239fe7ae17" containerName="nova-cell0-conductor-db-sync" Dec 03 16:43:47 crc kubenswrapper[4768]: E1203 16:43:47.499463 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="extract-utilities" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.499473 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="extract-utilities" Dec 03 16:43:47 crc kubenswrapper[4768]: E1203 16:43:47.499521 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="registry-server" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.499530 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="registry-server" Dec 03 16:43:47 crc kubenswrapper[4768]: E1203 16:43:47.499565 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="extract-content" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.499573 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="extract-content" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.501725 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb313e1-4e5d-40b8-a882-82239fe7ae17" containerName="nova-cell0-conductor-db-sync" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.501765 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" containerName="registry-server" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.506509 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.528360 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.535230 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-k4fsr" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.556123 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1648c868-22a9-4e98-9d15-b8bc2ca021d6" path="/var/lib/kubelet/pods/1648c868-22a9-4e98-9d15-b8bc2ca021d6/volumes" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.557185 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.575158 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.576012 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.576164 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mbjq\" (UniqueName: \"kubernetes.io/projected/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-kube-api-access-7mbjq\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.678053 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.679671 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.679863 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mbjq\" (UniqueName: \"kubernetes.io/projected/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-kube-api-access-7mbjq\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.683076 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc 
kubenswrapper[4768]: I1203 16:43:47.683536 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.698631 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mbjq\" (UniqueName: \"kubernetes.io/projected/8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8-kube-api-access-7mbjq\") pod \"nova-cell0-conductor-0\" (UID: \"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8\") " pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:47 crc kubenswrapper[4768]: I1203 16:43:47.849353 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.019243 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.022420 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.034177 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.086471 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.086551 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.086634 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hshf6\" (UniqueName: \"kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.189587 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.189717 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hshf6\" (UniqueName: \"kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.189828 
4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.190288 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.190332 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.212222 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hshf6\" (UniqueName: \"kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6\") pod \"community-operators-gtfgx\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.352450 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.406421 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.442546 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8","Type":"ContainerStarted","Data":"c1339d402da36cef417e20716585f6dabeca517daaeb10e8743ef530a1c29a06"} Dec 03 16:43:48 crc kubenswrapper[4768]: I1203 16:43:48.870232 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:43:49 crc kubenswrapper[4768]: I1203 16:43:49.452651 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtfgx" event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerStarted","Data":"7bbb4bbc7337db45ff3e28528f31dc6d9c79ac404a0694095d61c01a442eb1c1"} Dec 03 16:43:51 crc kubenswrapper[4768]: I1203 16:43:51.475101 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8","Type":"ContainerStarted","Data":"b668ee268fe42d95070bd8a0b4f68749dbc8230ff729472067e7d0f6fd219ed4"} Dec 03 16:43:51 crc kubenswrapper[4768]: I1203 16:43:51.476241 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:51 crc kubenswrapper[4768]: I1203 16:43:51.477856 4768 generic.go:334] "Generic (PLEG): container finished" podID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerID="a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8" exitCode=0 Dec 03 16:43:51 crc kubenswrapper[4768]: I1203 16:43:51.477918 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtfgx" 
event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerDied","Data":"a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8"} Dec 03 16:43:51 crc kubenswrapper[4768]: I1203 16:43:51.506245 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=4.506227131 podStartE2EDuration="4.506227131s" podCreationTimestamp="2025-12-03 16:43:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:43:51.495512856 +0000 UTC m=+1528.414849279" watchObservedRunningTime="2025-12-03 16:43:51.506227131 +0000 UTC m=+1528.425563554" Dec 03 16:43:52 crc kubenswrapper[4768]: I1203 16:43:52.500739 4768 generic.go:334] "Generic (PLEG): container finished" podID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerID="cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5" exitCode=0 Dec 03 16:43:52 crc kubenswrapper[4768]: I1203 16:43:52.500867 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtfgx" event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerDied","Data":"cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5"} Dec 03 16:43:53 crc kubenswrapper[4768]: I1203 16:43:53.512271 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtfgx" event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerStarted","Data":"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13"} Dec 03 16:43:53 crc kubenswrapper[4768]: I1203 16:43:53.529858 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gtfgx" podStartSLOduration=4.993910506 podStartE2EDuration="6.529834319s" podCreationTimestamp="2025-12-03 16:43:47 +0000 UTC" firstStartedPulling="2025-12-03 16:43:51.479540896 +0000 UTC m=+1528.398877319" lastFinishedPulling="2025-12-03 16:43:53.015464709 +0000 UTC m=+1529.934801132" observedRunningTime="2025-12-03 16:43:53.529553072 +0000 UTC m=+1530.448889495" watchObservedRunningTime="2025-12-03 16:43:53.529834319 +0000 UTC m=+1530.449170762" Dec 03 16:43:56 crc kubenswrapper[4768]: I1203 16:43:56.028907 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:43:56 crc kubenswrapper[4768]: I1203 16:43:56.029536 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:43:57 crc kubenswrapper[4768]: I1203 16:43:57.907046 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 03 16:43:58 crc kubenswrapper[4768]: I1203 16:43:58.353475 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:58 crc kubenswrapper[4768]: I1203 16:43:58.353551 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gtfgx" Dec 
03 16:43:58 crc kubenswrapper[4768]: I1203 16:43:58.438488 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:58 crc kubenswrapper[4768]: I1203 16:43:58.654725 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:43:58 crc kubenswrapper[4768]: I1203 16:43:58.713134 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.112010 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-bcfpr"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.114232 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.117520 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.119050 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.136535 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbxch\" (UniqueName: \"kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.136739 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.136846 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.136900 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.137287 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bcfpr"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.240113 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.240186 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.240231 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbxch\" (UniqueName: \"kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.240350 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.255319 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.266253 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.287130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.288334 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.295250 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.302406 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbxch\" (UniqueName: \"kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.328456 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-bcfpr\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.333612 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.342575 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.342634 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.342662 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.342687 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk8pz\" (UniqueName: \"kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.419818 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.426702 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.434720 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.448463 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.450174 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.451222 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.452099 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.455918 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456001 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456071 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456094 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456127 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456182 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk8pz\" (UniqueName: \"kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.456209 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvvwn\" (UniqueName: \"kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.457096 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.466431 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.482331 4768 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.519309 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk8pz\" (UniqueName: \"kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz\") pod \"nova-api-0\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.530877 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.560189 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.560230 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.560324 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.573842 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.573921 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.573983 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74gfs\" (UniqueName: \"kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.574012 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvvwn\" (UniqueName: \"kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.580239 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.583779 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.594102 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.596392 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvvwn\" (UniqueName: \"kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn\") pod \"nova-scheduler-0\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.606041 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.628541 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.630320 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.632818 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.652109 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.653997 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.676282 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.676611 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h495v\" (UniqueName: \"kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.676687 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.677329 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.677718 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.677784 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.677852 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74gfs\" (UniqueName: \"kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.678228 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.680709 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.684348 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.696230 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.698435 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74gfs\" (UniqueName: \"kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs\") pod \"nova-metadata-0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.705800 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"] Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.739281 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780014 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780065 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780085 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780111 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780141 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h495v\" (UniqueName: \"kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780161 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780189 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-775xs\" (UniqueName: \"kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780225 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.780251 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.783581 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.784083 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.799051 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h495v\" (UniqueName: \"kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v\") pod \"nova-cell1-novncproxy-0\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.882869 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.883058 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.883091 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.884183 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.884536 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-swift-storage-0\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.884654 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.885082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.883273 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.887250 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.887322 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.887416 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-775xs\" (UniqueName: \"kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.922324 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-775xs\" (UniqueName: \"kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs\") pod \"dnsmasq-dns-78cd565959-zcp5f\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") " pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.925209 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.956205 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:43:59 crc kubenswrapper[4768]: I1203 16:43:59.991750 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.207113 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-bcfpr"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.389217 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.531431 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.617291 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerStarted","Data":"16b37be1c2f0af825cccd483fe44eb3503cbdddac271a0004a1aa4ce29ed35d1"} Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.619095 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bcfpr" event={"ID":"89dc866f-2349-4947-8f06-ca7046e66709","Type":"ContainerStarted","Data":"0703dda317c407ddc730d24e20ff4c0e63bcd1d67f9f38de97b61125fc8cd000"} Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.619998 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ad74486-a75f-48f5-aba6-f1df78464ebd","Type":"ContainerStarted","Data":"81b22896a1804671c92b4b4aac86516c74c2e3115f017b5313463611a7194700"} Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.620194 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gtfgx" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="registry-server" containerID="cri-o://353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13" gracePeriod=2 Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.751739 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zrc69"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.754045 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.757452 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.759641 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.770373 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zrc69"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.803908 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.836718 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htv2h\" (UniqueName: \"kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.836793 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.836868 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.836931 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.939057 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.940548 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.940775 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htv2h\" (UniqueName: \"kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: 
\"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.940839 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.944944 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.945296 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.946411 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:00 crc kubenswrapper[4768]: I1203 16:44:00.969045 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htv2h\" (UniqueName: \"kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h\") pod \"nova-cell1-conductor-db-sync-zrc69\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.085895 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.175017 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"] Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.190991 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.233925 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.352754 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content\") pod \"1aa5e321-fa2e-47fa-9874-1e421a525c77\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.352880 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hshf6\" (UniqueName: \"kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6\") pod \"1aa5e321-fa2e-47fa-9874-1e421a525c77\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.353332 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities\") pod \"1aa5e321-fa2e-47fa-9874-1e421a525c77\" (UID: \"1aa5e321-fa2e-47fa-9874-1e421a525c77\") " Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.354733 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities" (OuterVolumeSpecName: "utilities") pod "1aa5e321-fa2e-47fa-9874-1e421a525c77" (UID: "1aa5e321-fa2e-47fa-9874-1e421a525c77"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.363455 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6" (OuterVolumeSpecName: "kube-api-access-hshf6") pod "1aa5e321-fa2e-47fa-9874-1e421a525c77" (UID: "1aa5e321-fa2e-47fa-9874-1e421a525c77"). InnerVolumeSpecName "kube-api-access-hshf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.436148 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1aa5e321-fa2e-47fa-9874-1e421a525c77" (UID: "1aa5e321-fa2e-47fa-9874-1e421a525c77"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.455989 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.456040 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hshf6\" (UniqueName: \"kubernetes.io/projected/1aa5e321-fa2e-47fa-9874-1e421a525c77-kube-api-access-hshf6\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.456065 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1aa5e321-fa2e-47fa-9874-1e421a525c77-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.621978 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zrc69"] Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.636081 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0264e152-fc1f-4658-bb13-cbbc8c713b73","Type":"ContainerStarted","Data":"aba584784a56af786950d94d4f3513800cc65dfa740e7f0adba688b5106ceb8d"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.643812 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerStarted","Data":"48e95b5b8cf2acf91ca12f096e4e0190d1ab503f40c7db3f7920c3b5f96db78e"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.647716 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bcfpr" event={"ID":"89dc866f-2349-4947-8f06-ca7046e66709","Type":"ContainerStarted","Data":"b6d6a5ff21df3ff14e4289bf01a3a2e197261eebd4b2ebacf995c27133347dc4"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.662739 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerStarted","Data":"4372aae51eb0c5342ee4dc9620536607242f4dfa65a9004475855b83f4e49c9d"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.663871 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerStarted","Data":"83fad0930bc08c3dd7801d8ddeb085ce87ff04fab4d7cb50938fc670a5fcdfb5"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.693225 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-bcfpr" podStartSLOduration=2.693209381 podStartE2EDuration="2.693209381s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:01.692066762 +0000 UTC m=+1538.611403185" watchObservedRunningTime="2025-12-03 16:44:01.693209381 +0000 UTC m=+1538.612545804" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.752254 4768 generic.go:334] "Generic (PLEG): container finished" podID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerID="353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13" exitCode=0 Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.752407 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-gtfgx" event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerDied","Data":"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.752484 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gtfgx" event={"ID":"1aa5e321-fa2e-47fa-9874-1e421a525c77","Type":"ContainerDied","Data":"7bbb4bbc7337db45ff3e28528f31dc6d9c79ac404a0694095d61c01a442eb1c1"} Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.752563 4768 scope.go:117] "RemoveContainer" containerID="353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.753973 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gtfgx" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.842768 4768 scope.go:117] "RemoveContainer" containerID="cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.859071 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.868908 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gtfgx"] Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.905292 4768 scope.go:117] "RemoveContainer" containerID="a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.964808 4768 scope.go:117] "RemoveContainer" containerID="353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13" Dec 03 16:44:01 crc kubenswrapper[4768]: E1203 16:44:01.969080 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13\": container with ID starting with 353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13 not found: ID does not exist" containerID="353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.969118 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13"} err="failed to get container status \"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13\": rpc error: code = NotFound desc = could not find container \"353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13\": container with ID starting with 353bd57d6cd3d60e71f28c6fba5ff7d19e2714c617b9cabed547a8c5f64afc13 not found: ID does not exist" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.969142 4768 scope.go:117] "RemoveContainer" containerID="cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5" Dec 03 16:44:01 crc kubenswrapper[4768]: E1203 16:44:01.971051 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5\": container with ID starting with cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5 not found: ID does not exist" containerID="cda5c4550f3552d66a3c6dd69e8519a4a500b32e1e16d2601c1accb3d4020ef5" Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.971073 4768 
Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.971087 4768 scope.go:117] "RemoveContainer" containerID="a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8"
Dec 03 16:44:01 crc kubenswrapper[4768]: E1203 16:44:01.971691 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8\": container with ID starting with a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8 not found: ID does not exist" containerID="a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8"
Dec 03 16:44:01 crc kubenswrapper[4768]: I1203 16:44:01.971747 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8"} err="failed to get container status \"a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8\": rpc error: code = NotFound desc = could not find container \"a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8\": container with ID starting with a54d63210581ff4f36bddef60229543738561037adf4052da42e57fe79e186d8 not found: ID does not exist"
Dec 03 16:44:02 crc kubenswrapper[4768]: I1203 16:44:02.768980 4768 generic.go:334] "Generic (PLEG): container finished" podID="62788624-724c-491a-9ed8-ca208a25a98b" containerID="4372aae51eb0c5342ee4dc9620536607242f4dfa65a9004475855b83f4e49c9d" exitCode=0
Dec 03 16:44:02 crc kubenswrapper[4768]: I1203 16:44:02.769086 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerDied","Data":"4372aae51eb0c5342ee4dc9620536607242f4dfa65a9004475855b83f4e49c9d"}
Dec 03 16:44:02 crc kubenswrapper[4768]: I1203 16:44:02.772302 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zrc69" event={"ID":"1d6daa39-bd99-4e98-a817-c18efd139e3c","Type":"ContainerStarted","Data":"0bdc5bff95bafddb5e456364d3946694a10ee078c227fdac0d93aeef641538fb"}
Dec 03 16:44:02 crc kubenswrapper[4768]: I1203 16:44:02.772340 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zrc69" event={"ID":"1d6daa39-bd99-4e98-a817-c18efd139e3c","Type":"ContainerStarted","Data":"f1d878b46f9aa926ecfc195473eafdbe90017e7fc3123ea7ab74e12b632998f6"}
Dec 03 16:44:02 crc kubenswrapper[4768]: I1203 16:44:02.828554 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-zrc69" podStartSLOduration=2.8285349159999997 podStartE2EDuration="2.828534916s" podCreationTimestamp="2025-12-03 16:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:02.805236568 +0000 UTC m=+1539.724572991" watchObservedRunningTime="2025-12-03 16:44:02.828534916 +0000 UTC m=+1539.747871339"
Dec 03 16:44:03 crc kubenswrapper[4768]: I1203 16:44:03.284161 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:44:03 crc kubenswrapper[4768]: I1203 16:44:03.303986 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 03 16:44:03 crc kubenswrapper[4768]: I1203 16:44:03.546715 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" path="/var/lib/kubelet/pods/1aa5e321-fa2e-47fa-9874-1e421a525c77/volumes"
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.810408 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerStarted","Data":"f002cf5cea834dd7401054b1bb543981a2ab408383ef36d6b267d1c1e78e3d50"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.810987 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerStarted","Data":"c8e550b8110760be55360e341b5df93c042c778826c1f82d32a3f13d0b09a0c7"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.813179 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0264e152-fc1f-4658-bb13-cbbc8c713b73","Type":"ContainerStarted","Data":"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.813321 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="0264e152-fc1f-4658-bb13-cbbc8c713b73" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8" gracePeriod=30
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.816210 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-log" containerID="cri-o://5b9368e91148afe702e4ec7e3d1a907586f6e31e44a1dc0f1b1f567a054a539a" gracePeriod=30
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.816306 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-metadata" containerID="cri-o://e5aaeae93e22911c6805f1301918842262462a7c9252319f7fec8acee3c8a92f" gracePeriod=30
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.817015 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerStarted","Data":"e5aaeae93e22911c6805f1301918842262462a7c9252319f7fec8acee3c8a92f"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.817067 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerStarted","Data":"5b9368e91148afe702e4ec7e3d1a907586f6e31e44a1dc0f1b1f567a054a539a"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.819874 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerStarted","Data":"e231f5a86f32d575c7f78cc7bd35b9a55c9d2f18c222886dbdcdea4e81e97846"}
Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.820095 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd565959-zcp5f"
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.822156 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ad74486-a75f-48f5-aba6-f1df78464ebd","Type":"ContainerStarted","Data":"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b"} Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.847006 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.672692615 podStartE2EDuration="6.846975702s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="2025-12-03 16:44:00.524692276 +0000 UTC m=+1537.444028699" lastFinishedPulling="2025-12-03 16:44:04.698975323 +0000 UTC m=+1541.618311786" observedRunningTime="2025-12-03 16:44:05.84570056 +0000 UTC m=+1542.765036983" watchObservedRunningTime="2025-12-03 16:44:05.846975702 +0000 UTC m=+1542.766312155" Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.895506 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" podStartSLOduration=6.895481617 podStartE2EDuration="6.895481617s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:05.866858883 +0000 UTC m=+1542.786195316" watchObservedRunningTime="2025-12-03 16:44:05.895481617 +0000 UTC m=+1542.814818060" Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.900486 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.623879442 podStartE2EDuration="6.900474935s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="2025-12-03 16:44:00.420983314 +0000 UTC m=+1537.340319737" lastFinishedPulling="2025-12-03 16:44:04.697578807 +0000 UTC m=+1541.616915230" observedRunningTime="2025-12-03 16:44:05.888360244 +0000 UTC m=+1542.807696677" watchObservedRunningTime="2025-12-03 16:44:05.900474935 +0000 UTC m=+1542.819811368" Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.952356 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.473506316 podStartE2EDuration="6.952319826s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="2025-12-03 16:44:01.220099581 +0000 UTC m=+1538.139436004" lastFinishedPulling="2025-12-03 16:44:04.698913091 +0000 UTC m=+1541.618249514" observedRunningTime="2025-12-03 16:44:05.940231446 +0000 UTC m=+1542.859567909" watchObservedRunningTime="2025-12-03 16:44:05.952319826 +0000 UTC m=+1542.871656249" Dec 03 16:44:05 crc kubenswrapper[4768]: I1203 16:44:05.964837 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.07773949 podStartE2EDuration="6.964819427s" podCreationTimestamp="2025-12-03 16:43:59 +0000 UTC" firstStartedPulling="2025-12-03 16:44:00.813780484 +0000 UTC m=+1537.733116907" lastFinishedPulling="2025-12-03 16:44:04.700860421 +0000 UTC m=+1541.620196844" observedRunningTime="2025-12-03 16:44:05.958972856 +0000 UTC m=+1542.878309279" watchObservedRunningTime="2025-12-03 16:44:05.964819427 +0000 UTC m=+1542.884155850" Dec 03 16:44:06 crc kubenswrapper[4768]: I1203 16:44:06.841360 4768 generic.go:334] "Generic (PLEG): container finished" 
podID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerID="5b9368e91148afe702e4ec7e3d1a907586f6e31e44a1dc0f1b1f567a054a539a" exitCode=143 Dec 03 16:44:06 crc kubenswrapper[4768]: I1203 16:44:06.841477 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerDied","Data":"5b9368e91148afe702e4ec7e3d1a907586f6e31e44a1dc0f1b1f567a054a539a"} Dec 03 16:44:08 crc kubenswrapper[4768]: I1203 16:44:08.687954 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 16:44:08 crc kubenswrapper[4768]: I1203 16:44:08.878285 4768 generic.go:334] "Generic (PLEG): container finished" podID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerID="e5aaeae93e22911c6805f1301918842262462a7c9252319f7fec8acee3c8a92f" exitCode=0 Dec 03 16:44:08 crc kubenswrapper[4768]: I1203 16:44:08.878327 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerDied","Data":"e5aaeae93e22911c6805f1301918842262462a7c9252319f7fec8acee3c8a92f"} Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.212731 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.354923 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle\") pod \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.354970 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data\") pod \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.355186 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74gfs\" (UniqueName: \"kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs\") pod \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.355213 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs\") pod \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\" (UID: \"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0\") " Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.355960 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs" (OuterVolumeSpecName: "logs") pod "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" (UID: "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.376829 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs" (OuterVolumeSpecName: "kube-api-access-74gfs") pod "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" (UID: "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0"). 
InnerVolumeSpecName "kube-api-access-74gfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.434227 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data" (OuterVolumeSpecName: "config-data") pod "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" (UID: "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.444764 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" (UID: "3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.457005 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.457040 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.457050 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74gfs\" (UniqueName: \"kubernetes.io/projected/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-kube-api-access-74gfs\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.457061 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.606773 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.606840 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.636825 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.740923 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.740975 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.894260 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0","Type":"ContainerDied","Data":"48e95b5b8cf2acf91ca12f096e4e0190d1ab503f40c7db3f7920c3b5f96db78e"} Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.894331 4768 scope.go:117] "RemoveContainer" containerID="e5aaeae93e22911c6805f1301918842262462a7c9252319f7fec8acee3c8a92f" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.894516 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.902159 4768 generic.go:334] "Generic (PLEG): container finished" podID="89dc866f-2349-4947-8f06-ca7046e66709" containerID="b6d6a5ff21df3ff14e4289bf01a3a2e197261eebd4b2ebacf995c27133347dc4" exitCode=0 Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.902347 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bcfpr" event={"ID":"89dc866f-2349-4947-8f06-ca7046e66709","Type":"ContainerDied","Data":"b6d6a5ff21df3ff14e4289bf01a3a2e197261eebd4b2ebacf995c27133347dc4"} Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.948998 4768 scope.go:117] "RemoveContainer" containerID="5b9368e91148afe702e4ec7e3d1a907586f6e31e44a1dc0f1b1f567a054a539a" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.953741 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.954300 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.956854 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.968826 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982239 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:09 crc kubenswrapper[4768]: E1203 16:44:09.982721 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="registry-server" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982739 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="registry-server" Dec 03 16:44:09 crc kubenswrapper[4768]: E1203 16:44:09.982757 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-log" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982763 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-log" Dec 03 16:44:09 crc kubenswrapper[4768]: E1203 16:44:09.982775 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="extract-utilities" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982782 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="extract-utilities" Dec 03 16:44:09 crc kubenswrapper[4768]: E1203 16:44:09.982799 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="extract-content" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982804 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="extract-content" Dec 03 16:44:09 crc kubenswrapper[4768]: E1203 16:44:09.982832 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-metadata" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.982838 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-metadata" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.983024 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-log" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.983039 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" containerName="nova-metadata-metadata" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.983053 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="1aa5e321-fa2e-47fa-9874-1e421a525c77" containerName="registry-server" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.984181 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.987283 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.987480 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 16:44:09 crc kubenswrapper[4768]: I1203 16:44:09.998941 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.010549 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.070422 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.070814 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.071054 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.071257 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.071464 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxtq7\" (UniqueName: \"kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.092538 4768 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.092796 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="dnsmasq-dns" containerID="cri-o://66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf" gracePeriod=10 Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.173683 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.173777 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxtq7\" (UniqueName: \"kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.173808 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.174749 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.174880 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.174966 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.179719 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.181038 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.196795 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.212828 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxtq7\" (UniqueName: \"kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7\") pod \"nova-metadata-0\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.303113 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.743028 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.822802 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.209:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.822990 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.209:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.893499 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.893907 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.894000 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.894074 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kj5p\" (UniqueName: \"kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.894189 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.894227 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc\") pod \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\" (UID: \"4b54d43f-bef8-4ee6-909f-f550a7e70cbd\") " Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.901932 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p" (OuterVolumeSpecName: "kube-api-access-7kj5p") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "kube-api-access-7kj5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.908687 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.962245 4768 generic.go:334] "Generic (PLEG): container finished" podID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerID="66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf" exitCode=0 Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.962331 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" event={"ID":"4b54d43f-bef8-4ee6-909f-f550a7e70cbd","Type":"ContainerDied","Data":"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf"} Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.962382 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" event={"ID":"4b54d43f-bef8-4ee6-909f-f550a7e70cbd","Type":"ContainerDied","Data":"7c12ac465cbc558a4c993f3d289cc80edd6b83603017b0b68b3e7cd43db46934"} Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.962401 4768 scope.go:117] "RemoveContainer" containerID="66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.962534 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67bdc55879-4gftf" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.982565 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.990350 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:10 crc kubenswrapper[4768]: I1203 16:44:10.999573 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.003875 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.003903 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kj5p\" (UniqueName: \"kubernetes.io/projected/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-kube-api-access-7kj5p\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.009336 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.031456 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.033157 4768 scope.go:117] "RemoveContainer" containerID="ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.055104 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config" (OuterVolumeSpecName: "config") pod "4b54d43f-bef8-4ee6-909f-f550a7e70cbd" (UID: "4b54d43f-bef8-4ee6-909f-f550a7e70cbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.094379 4768 scope.go:117] "RemoveContainer" containerID="66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf" Dec 03 16:44:11 crc kubenswrapper[4768]: E1203 16:44:11.098181 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf\": container with ID starting with 66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf not found: ID does not exist" containerID="66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.098230 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf"} err="failed to get container status \"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf\": rpc error: code = NotFound desc = could not find container \"66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf\": container with ID starting with 66ef364769100427130b3ae03d0384a556071e28acdff35e213b3f92c0da05bf not found: ID does not exist" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.098266 4768 scope.go:117] "RemoveContainer" containerID="ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7" Dec 03 16:44:11 crc kubenswrapper[4768]: E1203 16:44:11.099398 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7\": container with ID starting with ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7 not found: ID does not exist" containerID="ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.099426 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7"} err="failed to get container status \"ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7\": rpc error: code = NotFound desc = could not find container \"ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7\": container with ID starting with ee1425bf1fa69e69777ea90d3e94daefca5bfa30676a46166596efbeb7fb6dc7 not found: ID does not exist" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.105942 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.105971 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.105984 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4b54d43f-bef8-4ee6-909f-f550a7e70cbd-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.328312 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.339374 
4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67bdc55879-4gftf"] Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.518062 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.542045 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0" path="/var/lib/kubelet/pods/3a3524c7-bcf9-451e-ae4b-ec2b93ab1dd0/volumes" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.542642 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" path="/var/lib/kubelet/pods/4b54d43f-bef8-4ee6-909f-f550a7e70cbd/volumes" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.621396 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle\") pod \"89dc866f-2349-4947-8f06-ca7046e66709\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.621708 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbxch\" (UniqueName: \"kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch\") pod \"89dc866f-2349-4947-8f06-ca7046e66709\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.621754 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data\") pod \"89dc866f-2349-4947-8f06-ca7046e66709\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.621966 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts\") pod \"89dc866f-2349-4947-8f06-ca7046e66709\" (UID: \"89dc866f-2349-4947-8f06-ca7046e66709\") " Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.663582 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch" (OuterVolumeSpecName: "kube-api-access-gbxch") pod "89dc866f-2349-4947-8f06-ca7046e66709" (UID: "89dc866f-2349-4947-8f06-ca7046e66709"). InnerVolumeSpecName "kube-api-access-gbxch". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.672781 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data" (OuterVolumeSpecName: "config-data") pod "89dc866f-2349-4947-8f06-ca7046e66709" (UID: "89dc866f-2349-4947-8f06-ca7046e66709"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.675731 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts" (OuterVolumeSpecName: "scripts") pod "89dc866f-2349-4947-8f06-ca7046e66709" (UID: "89dc866f-2349-4947-8f06-ca7046e66709"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.691535 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89dc866f-2349-4947-8f06-ca7046e66709" (UID: "89dc866f-2349-4947-8f06-ca7046e66709"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.705322 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 03 16:44:11 crc kubenswrapper[4768]: E1203 16:44:11.708279 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="dnsmasq-dns" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.708311 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="dnsmasq-dns" Dec 03 16:44:11 crc kubenswrapper[4768]: E1203 16:44:11.708341 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89dc866f-2349-4947-8f06-ca7046e66709" containerName="nova-manage" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.708350 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="89dc866f-2349-4947-8f06-ca7046e66709" containerName="nova-manage" Dec 03 16:44:11 crc kubenswrapper[4768]: E1203 16:44:11.708399 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="init" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.708408 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="init" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.708721 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="89dc866f-2349-4947-8f06-ca7046e66709" containerName="nova-manage" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.708747 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b54d43f-bef8-4ee6-909f-f550a7e70cbd" containerName="dnsmasq-dns" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.713237 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.726246 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.733550 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbxch\" (UniqueName: \"kubernetes.io/projected/89dc866f-2349-4947-8f06-ca7046e66709-kube-api-access-gbxch\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.733589 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.733616 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.733625 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89dc866f-2349-4947-8f06-ca7046e66709-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.835513 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.835620 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.835668 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcszc\" (UniqueName: \"kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.937116 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.937643 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.937974 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.937588 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.938116 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcszc\" (UniqueName: \"kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.972755 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcszc\" (UniqueName: \"kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc\") pod \"redhat-marketplace-qhqdg\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.978058 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerStarted","Data":"f4442a29392b02509c413d37901f8c61df6b820f28bde5e13123859318b27372"} Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.978095 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerStarted","Data":"8ddf30bc6ba6df26a0b026aae4f9d4972ce25ed92e77fca2a7a7e0a4631e9448"} Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.978107 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerStarted","Data":"1d718b9b657c97a4d1f54cfdad46b5986eba1cd7cec1e0edeb9bae72a9006877"} Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.979723 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-bcfpr" Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.979724 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-bcfpr" event={"ID":"89dc866f-2349-4947-8f06-ca7046e66709","Type":"ContainerDied","Data":"0703dda317c407ddc730d24e20ff4c0e63bcd1d67f9f38de97b61125fc8cd000"} Dec 03 16:44:11 crc kubenswrapper[4768]: I1203 16:44:11.979790 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0703dda317c407ddc730d24e20ff4c0e63bcd1d67f9f38de97b61125fc8cd000" Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.020826 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.0208076 podStartE2EDuration="3.0208076s" podCreationTimestamp="2025-12-03 16:44:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:12.010172227 +0000 UTC m=+1548.929508670" watchObservedRunningTime="2025-12-03 16:44:12.0208076 +0000 UTC m=+1548.940144023" Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.044414 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.277426 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.278073 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-log" containerID="cri-o://c8e550b8110760be55360e341b5df93c042c778826c1f82d32a3f13d0b09a0c7" gracePeriod=30 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.278675 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-api" containerID="cri-o://f002cf5cea834dd7401054b1bb543981a2ab408383ef36d6b267d1c1e78e3d50" gracePeriod=30 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.300091 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.300323 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7ad74486-a75f-48f5-aba6-f1df78464ebd" containerName="nova-scheduler-scheduler" containerID="cri-o://6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b" gracePeriod=30 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.338412 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:12 crc kubenswrapper[4768]: W1203 16:44:12.666985 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod79dd49d2_682f_4df3_ba0b_7f120289b0aa.slice/crio-0b8f8af318b9fa295af9b0a849249c9abad8dae19d4c7f894b7e1a8abc309a28 WatchSource:0}: Error finding container 0b8f8af318b9fa295af9b0a849249c9abad8dae19d4c7f894b7e1a8abc309a28: Status 404 returned error can't find the container with id 0b8f8af318b9fa295af9b0a849249c9abad8dae19d4c7f894b7e1a8abc309a28 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.671500 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 
03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.997192 4768 generic.go:334] "Generic (PLEG): container finished" podID="f7a3c796-2746-4e52-9501-19dd42475620" containerID="c8e550b8110760be55360e341b5df93c042c778826c1f82d32a3f13d0b09a0c7" exitCode=143 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.997286 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerDied","Data":"c8e550b8110760be55360e341b5df93c042c778826c1f82d32a3f13d0b09a0c7"} Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.999613 4768 generic.go:334] "Generic (PLEG): container finished" podID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerID="f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159" exitCode=0 Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.999720 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerDied","Data":"f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159"} Dec 03 16:44:12 crc kubenswrapper[4768]: I1203 16:44:12.999763 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerStarted","Data":"0b8f8af318b9fa295af9b0a849249c9abad8dae19d4c7f894b7e1a8abc309a28"} Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.762689 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.889957 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle\") pod \"7ad74486-a75f-48f5-aba6-f1df78464ebd\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.890424 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data\") pod \"7ad74486-a75f-48f5-aba6-f1df78464ebd\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.890482 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvvwn\" (UniqueName: \"kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn\") pod \"7ad74486-a75f-48f5-aba6-f1df78464ebd\" (UID: \"7ad74486-a75f-48f5-aba6-f1df78464ebd\") " Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.896093 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn" (OuterVolumeSpecName: "kube-api-access-tvvwn") pod "7ad74486-a75f-48f5-aba6-f1df78464ebd" (UID: "7ad74486-a75f-48f5-aba6-f1df78464ebd"). InnerVolumeSpecName "kube-api-access-tvvwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.930356 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ad74486-a75f-48f5-aba6-f1df78464ebd" (UID: "7ad74486-a75f-48f5-aba6-f1df78464ebd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.967386 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data" (OuterVolumeSpecName: "config-data") pod "7ad74486-a75f-48f5-aba6-f1df78464ebd" (UID: "7ad74486-a75f-48f5-aba6-f1df78464ebd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.992750 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.992799 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ad74486-a75f-48f5-aba6-f1df78464ebd-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:13 crc kubenswrapper[4768]: I1203 16:44:13.992814 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvvwn\" (UniqueName: \"kubernetes.io/projected/7ad74486-a75f-48f5-aba6-f1df78464ebd-kube-api-access-tvvwn\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.012560 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerStarted","Data":"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77"} Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014267 4768 generic.go:334] "Generic (PLEG): container finished" podID="7ad74486-a75f-48f5-aba6-f1df78464ebd" containerID="6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b" exitCode=0 Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014328 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ad74486-a75f-48f5-aba6-f1df78464ebd","Type":"ContainerDied","Data":"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b"} Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014357 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7ad74486-a75f-48f5-aba6-f1df78464ebd","Type":"ContainerDied","Data":"81b22896a1804671c92b4b4aac86516c74c2e3115f017b5313463611a7194700"} Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014386 4768 scope.go:117] "RemoveContainer" containerID="6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014443 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-log" containerID="cri-o://8ddf30bc6ba6df26a0b026aae4f9d4972ce25ed92e77fca2a7a7e0a4631e9448" gracePeriod=30 Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014329 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.014631 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-metadata" containerID="cri-o://f4442a29392b02509c413d37901f8c61df6b820f28bde5e13123859318b27372" gracePeriod=30 Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.043440 4768 scope.go:117] "RemoveContainer" containerID="6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b" Dec 03 16:44:14 crc kubenswrapper[4768]: E1203 16:44:14.044539 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b\": container with ID starting with 6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b not found: ID does not exist" containerID="6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.044692 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b"} err="failed to get container status \"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b\": rpc error: code = NotFound desc = could not find container \"6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b\": container with ID starting with 6fa85a293cb0815a3600a88261304308e3acfb38b3c901a9d6e56f42a10fcd5b not found: ID does not exist" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.066333 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.080276 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.093033 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:14 crc kubenswrapper[4768]: E1203 16:44:14.093450 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ad74486-a75f-48f5-aba6-f1df78464ebd" containerName="nova-scheduler-scheduler" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.093469 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ad74486-a75f-48f5-aba6-f1df78464ebd" containerName="nova-scheduler-scheduler" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.093696 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ad74486-a75f-48f5-aba6-f1df78464ebd" containerName="nova-scheduler-scheduler" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.094474 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.096474 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.145637 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.198629 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bmpx\" (UniqueName: \"kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.198843 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.199066 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.300809 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.300915 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.300958 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bmpx\" (UniqueName: \"kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.307523 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.308384 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.322763 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bmpx\" (UniqueName: 
\"kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx\") pod \"nova-scheduler-0\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.425019 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.472775 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 03 16:44:14 crc kubenswrapper[4768]: I1203 16:44:14.472986 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" containerName="kube-state-metrics" containerID="cri-o://345c34cf99875fbe860f80abf60dcb72da8f284ebef0c7ec0242374b1ff2786f" gracePeriod=30 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032284 4768 generic.go:334] "Generic (PLEG): container finished" podID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerID="f4442a29392b02509c413d37901f8c61df6b820f28bde5e13123859318b27372" exitCode=0 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032645 4768 generic.go:334] "Generic (PLEG): container finished" podID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerID="8ddf30bc6ba6df26a0b026aae4f9d4972ce25ed92e77fca2a7a7e0a4631e9448" exitCode=143 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032712 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerDied","Data":"f4442a29392b02509c413d37901f8c61df6b820f28bde5e13123859318b27372"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032744 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerDied","Data":"8ddf30bc6ba6df26a0b026aae4f9d4972ce25ed92e77fca2a7a7e0a4631e9448"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032757 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"532862e3-2ede-4002-89a4-8a0131cdf0f5","Type":"ContainerDied","Data":"1d718b9b657c97a4d1f54cfdad46b5986eba1cd7cec1e0edeb9bae72a9006877"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.032769 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d718b9b657c97a4d1f54cfdad46b5986eba1cd7cec1e0edeb9bae72a9006877" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.036326 4768 generic.go:334] "Generic (PLEG): container finished" podID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" containerID="345c34cf99875fbe860f80abf60dcb72da8f284ebef0c7ec0242374b1ff2786f" exitCode=2 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.036433 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f82fe5c1-e056-4fbb-bca3-7552b85daa9b","Type":"ContainerDied","Data":"345c34cf99875fbe860f80abf60dcb72da8f284ebef0c7ec0242374b1ff2786f"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.045065 4768 generic.go:334] "Generic (PLEG): container finished" podID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerID="727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77" exitCode=0 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.045188 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" 
event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerDied","Data":"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.054347 4768 generic.go:334] "Generic (PLEG): container finished" podID="1d6daa39-bd99-4e98-a817-c18efd139e3c" containerID="0bdc5bff95bafddb5e456364d3946694a10ee078c227fdac0d93aeef641538fb" exitCode=0 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.054386 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zrc69" event={"ID":"1d6daa39-bd99-4e98-a817-c18efd139e3c","Type":"ContainerDied","Data":"0bdc5bff95bafddb5e456364d3946694a10ee078c227fdac0d93aeef641538fb"} Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.069524 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.075063 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.149610 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:15 crc kubenswrapper[4768]: W1203 16:44:15.171682 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97af800a_a6f3_4818_bfc9_c27e0bd25e44.slice/crio-31b36c7e102e406bd8ba8fc8065d084804517b4ec3bba10a3e30b02da72892f2 WatchSource:0}: Error finding container 31b36c7e102e406bd8ba8fc8065d084804517b4ec3bba10a3e30b02da72892f2: Status 404 returned error can't find the container with id 31b36c7e102e406bd8ba8fc8065d084804517b4ec3bba10a3e30b02da72892f2 Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.244414 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk5th\" (UniqueName: \"kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th\") pod \"f82fe5c1-e056-4fbb-bca3-7552b85daa9b\" (UID: \"f82fe5c1-e056-4fbb-bca3-7552b85daa9b\") " Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.244814 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-combined-ca-bundle\") pod \"532862e3-2ede-4002-89a4-8a0131cdf0f5\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.244847 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs\") pod \"532862e3-2ede-4002-89a4-8a0131cdf0f5\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.245102 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data\") pod \"532862e3-2ede-4002-89a4-8a0131cdf0f5\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.245165 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs\") pod \"532862e3-2ede-4002-89a4-8a0131cdf0f5\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") " Dec 
03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.245206 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxtq7\" (UniqueName: \"kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7\") pod \"532862e3-2ede-4002-89a4-8a0131cdf0f5\" (UID: \"532862e3-2ede-4002-89a4-8a0131cdf0f5\") "
Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.248457 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs" (OuterVolumeSpecName: "logs") pod "532862e3-2ede-4002-89a4-8a0131cdf0f5" (UID: "532862e3-2ede-4002-89a4-8a0131cdf0f5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.253040 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7" (OuterVolumeSpecName: "kube-api-access-mxtq7") pod "532862e3-2ede-4002-89a4-8a0131cdf0f5" (UID: "532862e3-2ede-4002-89a4-8a0131cdf0f5"). InnerVolumeSpecName "kube-api-access-mxtq7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.260019 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th" (OuterVolumeSpecName: "kube-api-access-wk5th") pod "f82fe5c1-e056-4fbb-bca3-7552b85daa9b" (UID: "f82fe5c1-e056-4fbb-bca3-7552b85daa9b"). InnerVolumeSpecName "kube-api-access-wk5th". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.304734 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data" (OuterVolumeSpecName: "config-data") pod "532862e3-2ede-4002-89a4-8a0131cdf0f5" (UID: "532862e3-2ede-4002-89a4-8a0131cdf0f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
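
The reconciler records above follow a fixed lifecycle per volume: an "operationExecutor.UnmountVolume started" entry, an "UnmountVolume.TearDown succeeded" entry once the volume plugin finishes, and a closing "Volume detached" entry. A minimal sketch for auditing that lifecycle offline (Python; assumes journald-style kubelet lines on stdin, and keys each volume by its UniqueName field, which embeds the pod UID):

#!/usr/bin/env python3
# Track volume unmount lifecycles in a kubelet log: a volume is "pending"
# from its "UnmountVolume started" record until its "Volume detached" record.
import re
import sys

# The kubelet quotes these fields with literal backslash-escaped quotes,
# hence the \\" in the patterns.
START = re.compile(r'UnmountVolume started for volume .*?UniqueName: \\"([^\\"]+)\\"')
DETACHED = re.compile(r'Volume detached for volume .*?UniqueName: \\"([^\\"]+)\\"')

pending = set()
for line in sys.stdin:
    m = START.search(line)
    if m:
        pending.add(m.group(1))
        continue
    m = DETACHED.search(line)
    if m:
        pending.discard(m.group(1))

# Whatever remains never reached "Volume detached" (a teardown stuck mid-flight).
for unique_name in sorted(pending):
    print("no detach observed:", unique_name)

Keying on UniqueName rather than the short volume name avoids collisions between pods that both mount, say, a "config-data" volume.
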
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.348493 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.348533 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/532862e3-2ede-4002-89a4-8a0131cdf0f5-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.348545 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxtq7\" (UniqueName: \"kubernetes.io/projected/532862e3-2ede-4002-89a4-8a0131cdf0f5-kube-api-access-mxtq7\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.348560 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk5th\" (UniqueName: \"kubernetes.io/projected/f82fe5c1-e056-4fbb-bca3-7552b85daa9b-kube-api-access-wk5th\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.348572 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.356543 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "532862e3-2ede-4002-89a4-8a0131cdf0f5" (UID: "532862e3-2ede-4002-89a4-8a0131cdf0f5"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.457890 4768 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/532862e3-2ede-4002-89a4-8a0131cdf0f5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:15 crc kubenswrapper[4768]: I1203 16:44:15.542977 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ad74486-a75f-48f5-aba6-f1df78464ebd" path="/var/lib/kubelet/pods/7ad74486-a75f-48f5-aba6-f1df78464ebd/volumes" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.071202 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerStarted","Data":"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1"} Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.076908 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"97af800a-a6f3-4818-bfc9-c27e0bd25e44","Type":"ContainerStarted","Data":"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474"} Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.076947 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"97af800a-a6f3-4818-bfc9-c27e0bd25e44","Type":"ContainerStarted","Data":"31b36c7e102e406bd8ba8fc8065d084804517b4ec3bba10a3e30b02da72892f2"} Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.083928 4768 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.083928 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.084104 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f82fe5c1-e056-4fbb-bca3-7552b85daa9b","Type":"ContainerDied","Data":"e2b99f1e92bacf0abbe5568fdab1e4e9962c8fc65b2999ea8a616808350f0d43"}
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.084179 4768 scope.go:117] "RemoveContainer" containerID="345c34cf99875fbe860f80abf60dcb72da8f284ebef0c7ec0242374b1ff2786f"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.084389 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.104355 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qhqdg" podStartSLOduration=2.623657303 podStartE2EDuration="5.10433814s" podCreationTimestamp="2025-12-03 16:44:11 +0000 UTC" firstStartedPulling="2025-12-03 16:44:13.001470735 +0000 UTC m=+1549.920807158" lastFinishedPulling="2025-12-03 16:44:15.482151542 +0000 UTC m=+1552.401487995" observedRunningTime="2025-12-03 16:44:16.088302528 +0000 UTC m=+1553.007638971" watchObservedRunningTime="2025-12-03 16:44:16.10433814 +0000 UTC m=+1553.023674563"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.118991 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.118973855 podStartE2EDuration="2.118973855s" podCreationTimestamp="2025-12-03 16:44:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:16.11564855 +0000 UTC m=+1553.034984983" watchObservedRunningTime="2025-12-03 16:44:16.118973855 +0000 UTC m=+1553.038310278"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.141254 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.159292 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.172638 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.185414 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.194500 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: E1203 16:44:16.194967 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-metadata"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.194986 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-metadata"
Dec 03 16:44:16 crc kubenswrapper[4768]: E1203 16:44:16.195014 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" containerName="kube-state-metrics"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.195024 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" containerName="kube-state-metrics"
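
The two pod_startup_latency_tracker records above report podStartSLOduration next to podStartE2EDuration along with the pull window (firstStartedPulling/lastFinishedPulling). For nova-scheduler-0 the two durations are identical because no image had to be pulled, while redhat-marketplace-qhqdg spent roughly 2.5 s of its 5.1 s end-to-end startup pulling its image. A minimal sketch that surfaces that difference (Python, same stdin convention as the sketch above; it treats "the SLO duration excludes image pulls" as an assumption about this kubelet's SLI definition):

#!/usr/bin/env python3
# Estimate per-pod image-pull time from kubelet startup-latency records.
import re
import sys

REC = re.compile(
    r'Observed pod startup duration" pod="([^"]+)"'
    r' podStartSLOduration=([0-9.]+)'
    r' podStartE2EDuration="([0-9.]+)s"'
)

for line in sys.stdin:
    m = REC.search(line)
    if not m:
        continue
    pod, slo, e2e = m.group(1), float(m.group(2)), float(m.group(3))
    # Assumption: the SLO duration excludes image pulling, so the difference
    # approximates time spent pulling (0 when nothing was pulled).
    print(f"{pod}: e2e={e2e:.2f}s pull~={max(e2e - slo, 0):.2f}s")
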
Dec 03 16:44:16 crc kubenswrapper[4768]: E1203 16:44:16.195065 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-log"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.195071 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-log"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.195291 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" containerName="kube-state-metrics"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.195312 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-log"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.195324 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" containerName="nova-metadata-metadata"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.196148 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.199844 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.207094 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.208324 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.209990 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.214445 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.214825 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.223496 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.232667 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377413 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377738 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0"
Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377770 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\"
(UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377803 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v88r4\" (UniqueName: \"kubernetes.io/projected/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-api-access-v88r4\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377829 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377872 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377951 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nrqm\" (UniqueName: \"kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.377977 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.378030 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.479639 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480454 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480490 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " 
pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480525 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v88r4\" (UniqueName: \"kubernetes.io/projected/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-api-access-v88r4\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480552 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480609 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480649 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nrqm\" (UniqueName: \"kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480673 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.480726 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.483772 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.485232 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.485544 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.487614 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.497474 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.510286 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.515297 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.515875 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nrqm\" (UniqueName: \"kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm\") pod \"nova-metadata-0\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") " pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.520140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v88r4\" (UniqueName: \"kubernetes.io/projected/ed50faad-e23a-4fda-b993-1af6764ac5fb-kube-api-access-v88r4\") pod \"kube-state-metrics-0\" (UID: \"ed50faad-e23a-4fda-b993-1af6764ac5fb\") " pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.545582 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.547048 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.728285 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.889807 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle\") pod \"1d6daa39-bd99-4e98-a817-c18efd139e3c\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.889886 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data\") pod \"1d6daa39-bd99-4e98-a817-c18efd139e3c\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.889920 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts\") pod \"1d6daa39-bd99-4e98-a817-c18efd139e3c\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.890135 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htv2h\" (UniqueName: \"kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h\") pod \"1d6daa39-bd99-4e98-a817-c18efd139e3c\" (UID: \"1d6daa39-bd99-4e98-a817-c18efd139e3c\") " Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.897841 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts" (OuterVolumeSpecName: "scripts") pod "1d6daa39-bd99-4e98-a817-c18efd139e3c" (UID: "1d6daa39-bd99-4e98-a817-c18efd139e3c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.898078 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h" (OuterVolumeSpecName: "kube-api-access-htv2h") pod "1d6daa39-bd99-4e98-a817-c18efd139e3c" (UID: "1d6daa39-bd99-4e98-a817-c18efd139e3c"). InnerVolumeSpecName "kube-api-access-htv2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.922752 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d6daa39-bd99-4e98-a817-c18efd139e3c" (UID: "1d6daa39-bd99-4e98-a817-c18efd139e3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.945780 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data" (OuterVolumeSpecName: "config-data") pod "1d6daa39-bd99-4e98-a817-c18efd139e3c" (UID: "1d6daa39-bd99-4e98-a817-c18efd139e3c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.992541 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.992573 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.992582 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htv2h\" (UniqueName: \"kubernetes.io/projected/1d6daa39-bd99-4e98-a817-c18efd139e3c-kube-api-access-htv2h\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:16 crc kubenswrapper[4768]: I1203 16:44:16.992606 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d6daa39-bd99-4e98-a817-c18efd139e3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.123842 4768 generic.go:334] "Generic (PLEG): container finished" podID="f7a3c796-2746-4e52-9501-19dd42475620" containerID="f002cf5cea834dd7401054b1bb543981a2ab408383ef36d6b267d1c1e78e3d50" exitCode=0 Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.123918 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerDied","Data":"f002cf5cea834dd7401054b1bb543981a2ab408383ef36d6b267d1c1e78e3d50"} Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.125494 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.133208 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-zrc69" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.134103 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-zrc69" event={"ID":"1d6daa39-bd99-4e98-a817-c18efd139e3c","Type":"ContainerDied","Data":"f1d878b46f9aa926ecfc195473eafdbe90017e7fc3123ea7ab74e12b632998f6"} Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.134133 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f1d878b46f9aa926ecfc195473eafdbe90017e7fc3123ea7ab74e12b632998f6" Dec 03 16:44:17 crc kubenswrapper[4768]: W1203 16:44:17.140976 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d6e9bf9_5f7b_4ca3_a1d8_83ecebd939d8.slice/crio-a006794678788ac65bb82297281fe7103299980d47a14cd0ba5fbe7d2e331175 WatchSource:0}: Error finding container a006794678788ac65bb82297281fe7103299980d47a14cd0ba5fbe7d2e331175: Status 404 returned error can't find the container with id a006794678788ac65bb82297281fe7103299980d47a14cd0ba5fbe7d2e331175 Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.217529 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:44:17 crc kubenswrapper[4768]: E1203 16:44:17.218100 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d6daa39-bd99-4e98-a817-c18efd139e3c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.218119 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d6daa39-bd99-4e98-a817-c18efd139e3c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.218309 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d6daa39-bd99-4e98-a817-c18efd139e3c" containerName="nova-cell1-conductor-db-sync" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.219155 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.222205 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.238956 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.252202 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.298615 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.298689 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.298780 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhrj2\" (UniqueName: \"kubernetes.io/projected/891b6c40-c436-4d8a-a035-c49252143ce1-kube-api-access-mhrj2\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.315250 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.315789 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-central-agent" containerID="cri-o://990d81437d8ce78ccc501e312e8b92bd1da96095e288c63eadd5b3200a2cbc9d" gracePeriod=30
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.316052 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="sg-core" containerID="cri-o://dd1046965dad7d0bf31a456c9e7fb8ddad2c9582ded828c3f460d06b8fa38ce3" gracePeriod=30
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.316126 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="proxy-httpd" containerID="cri-o://4fcf75c76a3680c219fd7a49c047994c3d54d4cddee095e15187fd0251b4d0a9" gracePeriod=30
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.316162 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-notification-agent" containerID="cri-o://6da18d4b0ed0b271cbce252133b158713108e817aecddd6a1d095e0c1dac9cec" gracePeriod=30
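
Here the kubelet starts tearing down ceilometer-0: each of its four containers is killed with a 30-second grace period, and the PLEG "container finished" events further down record how they exited (exit code 0 for three of them, exit code 2 for sg-core). A minimal sketch that pairs each kill record with the eventual exit code (Python, same stdin convention; the field order in the patterns is taken from the records above):

#!/usr/bin/env python3
# Pair "Killing container with a grace period" records with the PLEG
# "container finished" records that report each container's exit code.
import re
import sys

KILL = re.compile(r'Killing container with a grace period.*?containerName="([^"]+)"'
                  r'.*?containerID="cri-o://([0-9a-f]+)".*?gracePeriod=(\d+)')
DONE = re.compile(r'container finished.*?containerID="([0-9a-f]+)" exitCode=(-?\d+)')

kills = {}
for line in sys.stdin:
    m = KILL.search(line)
    if m:
        kills[m.group(2)] = (m.group(1), int(m.group(3)))
        continue
    m = DONE.search(line)
    if m and m.group(1) in kills:
        name, grace = kills[m.group(1)]
        print(f"{name}: exitCode={m.group(2)} (killed with {grace}s grace)")
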
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.368384 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.401119 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.401218 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.401346 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhrj2\" (UniqueName: \"kubernetes.io/projected/891b6c40-c436-4d8a-a035-c49252143ce1-kube-api-access-mhrj2\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.406280 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.408016 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891b6c40-c436-4d8a-a035-c49252143ce1-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.426254 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhrj2\" (UniqueName: \"kubernetes.io/projected/891b6c40-c436-4d8a-a035-c49252143ce1-kube-api-access-mhrj2\") pod \"nova-cell1-conductor-0\" (UID: \"891b6c40-c436-4d8a-a035-c49252143ce1\") " pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.502587 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle\") pod \"f7a3c796-2746-4e52-9501-19dd42475620\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") "
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.502745 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data\") pod \"f7a3c796-2746-4e52-9501-19dd42475620\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") "
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.502827 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nk8pz\" (UniqueName: \"kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz\") pod \"f7a3c796-2746-4e52-9501-19dd42475620\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") "
Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.502941 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName:
\"kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs\") pod \"f7a3c796-2746-4e52-9501-19dd42475620\" (UID: \"f7a3c796-2746-4e52-9501-19dd42475620\") " Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.504618 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs" (OuterVolumeSpecName: "logs") pod "f7a3c796-2746-4e52-9501-19dd42475620" (UID: "f7a3c796-2746-4e52-9501-19dd42475620"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.509408 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz" (OuterVolumeSpecName: "kube-api-access-nk8pz") pod "f7a3c796-2746-4e52-9501-19dd42475620" (UID: "f7a3c796-2746-4e52-9501-19dd42475620"). InnerVolumeSpecName "kube-api-access-nk8pz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.545346 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.545351 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7a3c796-2746-4e52-9501-19dd42475620" (UID: "f7a3c796-2746-4e52-9501-19dd42475620"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.548423 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data" (OuterVolumeSpecName: "config-data") pod "f7a3c796-2746-4e52-9501-19dd42475620" (UID: "f7a3c796-2746-4e52-9501-19dd42475620"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.556285 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="532862e3-2ede-4002-89a4-8a0131cdf0f5" path="/var/lib/kubelet/pods/532862e3-2ede-4002-89a4-8a0131cdf0f5/volumes" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.556865 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f82fe5c1-e056-4fbb-bca3-7552b85daa9b" path="/var/lib/kubelet/pods/f82fe5c1-e056-4fbb-bca3-7552b85daa9b/volumes" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.606088 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7a3c796-2746-4e52-9501-19dd42475620-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.606134 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.606147 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7a3c796-2746-4e52-9501-19dd42475620-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.606157 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nk8pz\" (UniqueName: \"kubernetes.io/projected/f7a3c796-2746-4e52-9501-19dd42475620-kube-api-access-nk8pz\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:17 crc kubenswrapper[4768]: I1203 16:44:17.923064 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 03 16:44:17 crc kubenswrapper[4768]: W1203 16:44:17.939647 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod891b6c40_c436_4d8a_a035_c49252143ce1.slice/crio-340796c302a1fff96ee377840ce4e29739636a6c918514cf794e6270cbbb25c1 WatchSource:0}: Error finding container 340796c302a1fff96ee377840ce4e29739636a6c918514cf794e6270cbbb25c1: Status 404 returned error can't find the container with id 340796c302a1fff96ee377840ce4e29739636a6c918514cf794e6270cbbb25c1 Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.162999 4768 generic.go:334] "Generic (PLEG): container finished" podID="47238918-7f35-4a33-9439-481a29a5e4c0" containerID="4fcf75c76a3680c219fd7a49c047994c3d54d4cddee095e15187fd0251b4d0a9" exitCode=0 Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.163045 4768 generic.go:334] "Generic (PLEG): container finished" podID="47238918-7f35-4a33-9439-481a29a5e4c0" containerID="dd1046965dad7d0bf31a456c9e7fb8ddad2c9582ded828c3f460d06b8fa38ce3" exitCode=2 Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.163053 4768 generic.go:334] "Generic (PLEG): container finished" podID="47238918-7f35-4a33-9439-481a29a5e4c0" containerID="990d81437d8ce78ccc501e312e8b92bd1da96095e288c63eadd5b3200a2cbc9d" exitCode=0 Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.163116 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerDied","Data":"4fcf75c76a3680c219fd7a49c047994c3d54d4cddee095e15187fd0251b4d0a9"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.163163 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerDied","Data":"dd1046965dad7d0bf31a456c9e7fb8ddad2c9582ded828c3f460d06b8fa38ce3"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.163180 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerDied","Data":"990d81437d8ce78ccc501e312e8b92bd1da96095e288c63eadd5b3200a2cbc9d"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.168450 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7a3c796-2746-4e52-9501-19dd42475620","Type":"ContainerDied","Data":"16b37be1c2f0af825cccd483fe44eb3503cbdddac271a0004a1aa4ce29ed35d1"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.168516 4768 scope.go:117] "RemoveContainer" containerID="f002cf5cea834dd7401054b1bb543981a2ab408383ef36d6b267d1c1e78e3d50" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.168701 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.184992 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerStarted","Data":"02a8d88cd74635ce325de2b36f603da708a368b9eed9b031fff02f70670663ad"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.185046 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerStarted","Data":"3cc120048fc591b79d31e15701a73dcf3c911299b6e9987361e4017ee66aba36"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.185058 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerStarted","Data":"a006794678788ac65bb82297281fe7103299980d47a14cd0ba5fbe7d2e331175"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.195134 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ed50faad-e23a-4fda-b993-1af6764ac5fb","Type":"ContainerStarted","Data":"72fc1f0f6afdfe65ce67e3effd09ee8a56d18aeae7cd9d49b09a89337660821c"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.195177 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"ed50faad-e23a-4fda-b993-1af6764ac5fb","Type":"ContainerStarted","Data":"127badc6d68d611e01c0f8afd8b47fb27972602be83ad05277e3caf8dd663bac"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.196031 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.199702 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.211352 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"891b6c40-c436-4d8a-a035-c49252143ce1","Type":"ContainerStarted","Data":"340796c302a1fff96ee377840ce4e29739636a6c918514cf794e6270cbbb25c1"} Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.212328 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.212931 4768 scope.go:117] "RemoveContainer" 
containerID="c8e550b8110760be55360e341b5df93c042c778826c1f82d32a3f13d0b09a0c7" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.234780 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.246303 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.246281644 podStartE2EDuration="2.246281644s" podCreationTimestamp="2025-12-03 16:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:18.217491665 +0000 UTC m=+1555.136828098" watchObservedRunningTime="2025-12-03 16:44:18.246281644 +0000 UTC m=+1555.165618057" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.271646 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:18 crc kubenswrapper[4768]: E1203 16:44:18.272100 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-api" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.272117 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-api" Dec 03 16:44:18 crc kubenswrapper[4768]: E1203 16:44:18.272134 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-log" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.272140 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-log" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.272335 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-log" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.272354 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7a3c796-2746-4e52-9501-19dd42475620" containerName="nova-api-api" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.273518 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.275955 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.275942635 podStartE2EDuration="1.275942635s" podCreationTimestamp="2025-12-03 16:44:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:18.236615226 +0000 UTC m=+1555.155951649" watchObservedRunningTime="2025-12-03 16:44:18.275942635 +0000 UTC m=+1555.195279058" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.279093 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.294217 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.296222 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.882364526 podStartE2EDuration="2.296210106s" podCreationTimestamp="2025-12-03 16:44:16 +0000 UTC" firstStartedPulling="2025-12-03 16:44:17.236854732 +0000 UTC m=+1554.156191155" lastFinishedPulling="2025-12-03 16:44:17.650700312 +0000 UTC m=+1554.570036735" observedRunningTime="2025-12-03 16:44:18.256873536 +0000 UTC m=+1555.176209969" watchObservedRunningTime="2025-12-03 16:44:18.296210106 +0000 UTC m=+1555.215546529" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.420619 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.420662 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.420725 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qlfw\" (UniqueName: \"kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.420810 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.522142 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.522484 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.522510 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.522624 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qlfw\" (UniqueName: \"kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.522785 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.527658 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.528554 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.541994 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qlfw\" (UniqueName: \"kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw\") pod \"nova-api-0\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") " pod="openstack/nova-api-0" Dec 03 16:44:18 crc kubenswrapper[4768]: I1203 16:44:18.595122 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:44:19 crc kubenswrapper[4768]: I1203 16:44:19.067567 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:19 crc kubenswrapper[4768]: I1203 16:44:19.222933 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"891b6c40-c436-4d8a-a035-c49252143ce1","Type":"ContainerStarted","Data":"c37f8769cf1cef61ae879c40ebd769470a3581edcd57684e6f6d74857cd148ae"} Dec 03 16:44:19 crc kubenswrapper[4768]: I1203 16:44:19.225618 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerStarted","Data":"6c0e0658f69f5c93dbbccaadb31e9b72fd544f5b2c38be3a2bbf9cb6e6478e2e"} Dec 03 16:44:19 crc kubenswrapper[4768]: I1203 16:44:19.425891 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:44:19 crc kubenswrapper[4768]: I1203 16:44:19.541986 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7a3c796-2746-4e52-9501-19dd42475620" path="/var/lib/kubelet/pods/f7a3c796-2746-4e52-9501-19dd42475620/volumes" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.247506 4768 generic.go:334] "Generic (PLEG): container finished" podID="47238918-7f35-4a33-9439-481a29a5e4c0" containerID="6da18d4b0ed0b271cbce252133b158713108e817aecddd6a1d095e0c1dac9cec" exitCode=0 Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.247547 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerDied","Data":"6da18d4b0ed0b271cbce252133b158713108e817aecddd6a1d095e0c1dac9cec"} Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.252533 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerStarted","Data":"0346c176dc90077c526ec55f48c5438df729d085e71182ec99566d908fd39b00"} Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.252573 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerStarted","Data":"053d729556746c14f91ff5d6925566b663f5b9f392c85e91115f16e53e9bf848"} Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.279969 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.279950711 podStartE2EDuration="2.279950711s" podCreationTimestamp="2025-12-03 16:44:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:20.27523767 +0000 UTC m=+1557.194574093" watchObservedRunningTime="2025-12-03 16:44:20.279950711 +0000 UTC m=+1557.199287144" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.413156 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.571633 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.571827 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.571883 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.571919 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.572037 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-822hj\" (UniqueName: \"kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.572069 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.572127 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts\") pod \"47238918-7f35-4a33-9439-481a29a5e4c0\" (UID: \"47238918-7f35-4a33-9439-481a29a5e4c0\") " Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.573163 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.573551 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.577762 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj" (OuterVolumeSpecName: "kube-api-access-822hj") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "kube-api-access-822hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.597150 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts" (OuterVolumeSpecName: "scripts") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.603785 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.672242 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675342 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675387 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675401 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675413 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-822hj\" (UniqueName: \"kubernetes.io/projected/47238918-7f35-4a33-9439-481a29a5e4c0-kube-api-access-822hj\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675425 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/47238918-7f35-4a33-9439-481a29a5e4c0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.675436 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.686435 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data" (OuterVolumeSpecName: "config-data") pod "47238918-7f35-4a33-9439-481a29a5e4c0" (UID: "47238918-7f35-4a33-9439-481a29a5e4c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:20 crc kubenswrapper[4768]: I1203 16:44:20.776946 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47238918-7f35-4a33-9439-481a29a5e4c0-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.264027 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"47238918-7f35-4a33-9439-481a29a5e4c0","Type":"ContainerDied","Data":"8a90119d9f04263292efe3dc21df950b5c2aece9cd0e42df358bc9acae744f9c"} Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.264067 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.264115 4768 scope.go:117] "RemoveContainer" containerID="4fcf75c76a3680c219fd7a49c047994c3d54d4cddee095e15187fd0251b4d0a9" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.296571 4768 scope.go:117] "RemoveContainer" containerID="dd1046965dad7d0bf31a456c9e7fb8ddad2c9582ded828c3f460d06b8fa38ce3" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.316184 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.320682 4768 scope.go:117] "RemoveContainer" containerID="6da18d4b0ed0b271cbce252133b158713108e817aecddd6a1d095e0c1dac9cec" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.331812 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.341141 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.344045 4768 scope.go:117] "RemoveContainer" containerID="990d81437d8ce78ccc501e312e8b92bd1da96095e288c63eadd5b3200a2cbc9d" Dec 03 16:44:21 crc kubenswrapper[4768]: E1203 16:44:21.344564 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="sg-core" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.344603 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="sg-core" Dec 03 16:44:21 crc kubenswrapper[4768]: E1203 16:44:21.344639 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-notification-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.344646 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-notification-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: E1203 16:44:21.344666 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="proxy-httpd" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.344672 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="proxy-httpd" Dec 03 16:44:21 crc kubenswrapper[4768]: E1203 16:44:21.344698 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" 
containerName="ceilometer-central-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.344705 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-central-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.347015 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-central-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.347034 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="sg-core" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.347064 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="proxy-httpd" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.347081 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" containerName="ceilometer-notification-agent" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.358550 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.365257 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.365791 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.368477 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.405540 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.493866 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.493946 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494020 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494040 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-484gd\" (UniqueName: \"kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494066 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494130 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494162 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.494503 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.543837 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47238918-7f35-4a33-9439-481a29a5e4c0" path="/var/lib/kubelet/pods/47238918-7f35-4a33-9439-481a29a5e4c0/volumes" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.547029 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.547067 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597024 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597170 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597243 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597289 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597343 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597370 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-484gd\" (UniqueName: \"kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597398 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.597477 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.598021 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.598831 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.603093 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.603300 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.603411 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.605526 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.606366 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.619736 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-484gd\" (UniqueName: \"kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd\") pod \"ceilometer-0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") " pod="openstack/ceilometer-0" Dec 03 16:44:21 crc kubenswrapper[4768]: I1203 16:44:21.708069 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.044658 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.044950 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.103898 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.205193 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:44:22 crc kubenswrapper[4768]: W1203 16:44:22.205736 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a2dbb6c_b9a1_49e3_883e_404aca7ccfe0.slice/crio-e82244a04c2472a7fe35aeb904bd36681bd442cbca647baef013d58e50f3db2f WatchSource:0}: Error finding container e82244a04c2472a7fe35aeb904bd36681bd442cbca647baef013d58e50f3db2f: Status 404 returned error can't find the container with id e82244a04c2472a7fe35aeb904bd36681bd442cbca647baef013d58e50f3db2f Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.280076 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerStarted","Data":"e82244a04c2472a7fe35aeb904bd36681bd442cbca647baef013d58e50f3db2f"} Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.346547 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:22 crc kubenswrapper[4768]: I1203 16:44:22.407497 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 03 16:44:23 crc kubenswrapper[4768]: I1203 16:44:23.295041 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerStarted","Data":"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"} Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.316960 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerStarted","Data":"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"} Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.317108 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qhqdg" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="registry-server" 
containerID="cri-o://690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1" gracePeriod=2 Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.425708 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.458742 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.909790 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.996354 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcszc\" (UniqueName: \"kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc\") pod \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.996417 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content\") pod \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.996583 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities\") pod \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\" (UID: \"79dd49d2-682f-4df3-ba0b-7f120289b0aa\") " Dec 03 16:44:24 crc kubenswrapper[4768]: I1203 16:44:24.997869 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities" (OuterVolumeSpecName: "utilities") pod "79dd49d2-682f-4df3-ba0b-7f120289b0aa" (UID: "79dd49d2-682f-4df3-ba0b-7f120289b0aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.007961 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc" (OuterVolumeSpecName: "kube-api-access-vcszc") pod "79dd49d2-682f-4df3-ba0b-7f120289b0aa" (UID: "79dd49d2-682f-4df3-ba0b-7f120289b0aa"). InnerVolumeSpecName "kube-api-access-vcszc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.037750 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79dd49d2-682f-4df3-ba0b-7f120289b0aa" (UID: "79dd49d2-682f-4df3-ba0b-7f120289b0aa"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.099201 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcszc\" (UniqueName: \"kubernetes.io/projected/79dd49d2-682f-4df3-ba0b-7f120289b0aa-kube-api-access-vcszc\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.099238 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.099247 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79dd49d2-682f-4df3-ba0b-7f120289b0aa-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.332295 4768 generic.go:334] "Generic (PLEG): container finished" podID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerID="690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1" exitCode=0 Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.332349 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qhqdg" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.332374 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerDied","Data":"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1"} Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.333852 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qhqdg" event={"ID":"79dd49d2-682f-4df3-ba0b-7f120289b0aa","Type":"ContainerDied","Data":"0b8f8af318b9fa295af9b0a849249c9abad8dae19d4c7f894b7e1a8abc309a28"} Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.333884 4768 scope.go:117] "RemoveContainer" containerID="690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.366268 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.404729 4768 scope.go:117] "RemoveContainer" containerID="727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.435622 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.445637 4768 scope.go:117] "RemoveContainer" containerID="f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.450361 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qhqdg"] Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.472731 4768 scope.go:117] "RemoveContainer" containerID="690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1" Dec 03 16:44:25 crc kubenswrapper[4768]: E1203 16:44:25.473215 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1\": container with ID starting with 690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1 not 
found: ID does not exist" containerID="690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.473333 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1"} err="failed to get container status \"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1\": rpc error: code = NotFound desc = could not find container \"690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1\": container with ID starting with 690345276dd6ec51c050c2d9aba1f6b29117a9c2b07b640d45c210af5d122fb1 not found: ID does not exist" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.473441 4768 scope.go:117] "RemoveContainer" containerID="727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77" Dec 03 16:44:25 crc kubenswrapper[4768]: E1203 16:44:25.473808 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77\": container with ID starting with 727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77 not found: ID does not exist" containerID="727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.473908 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77"} err="failed to get container status \"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77\": rpc error: code = NotFound desc = could not find container \"727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77\": container with ID starting with 727ad430f9b724c4d5b564775eae452f84a8e1bedd62e2b4d38e9a1f7c5dff77 not found: ID does not exist" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.473976 4768 scope.go:117] "RemoveContainer" containerID="f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159" Dec 03 16:44:25 crc kubenswrapper[4768]: E1203 16:44:25.474675 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159\": container with ID starting with f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159 not found: ID does not exist" containerID="f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.474711 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159"} err="failed to get container status \"f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159\": rpc error: code = NotFound desc = could not find container \"f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159\": container with ID starting with f07f6715a238019936b1627bfd9f813574f3091f34281794fc283f46644fd159 not found: ID does not exist" Dec 03 16:44:25 crc kubenswrapper[4768]: I1203 16:44:25.545062 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" path="/var/lib/kubelet/pods/79dd49d2-682f-4df3-ba0b-7f120289b0aa/volumes" Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.028830 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv 
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.029198 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.029250 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.030175 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.030251 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" gracePeriod=600
Dec 03 16:44:26 crc kubenswrapper[4768]: E1203 16:44:26.162781 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.344990 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerStarted","Data":"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"}
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.350089 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" exitCode=0
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.351083 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"}
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.351212 4768 scope.go:117] "RemoveContainer" containerID="8838d0fd2aebc4b8c2edf99cdba00c8f80a4f57bd4eb36faafb771ad2678be15"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.351666 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"
Dec 03 16:44:26 crc kubenswrapper[4768]: E1203 16:44:26.352009 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.546925 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.546974 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 03 16:44:26 crc kubenswrapper[4768]: I1203 16:44:26.578146 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.361763 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerStarted","Data":"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"}
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.362425 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.389533 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.657238875 podStartE2EDuration="6.389513541s" podCreationTimestamp="2025-12-03 16:44:21 +0000 UTC" firstStartedPulling="2025-12-03 16:44:22.207481964 +0000 UTC m=+1559.126818387" lastFinishedPulling="2025-12-03 16:44:26.93975663 +0000 UTC m=+1563.859093053" observedRunningTime="2025-12-03 16:44:27.383907478 +0000 UTC m=+1564.303243921" watchObservedRunningTime="2025-12-03 16:44:27.389513541 +0000 UTC m=+1564.308849964"
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.576883 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.219:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.577231 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.219:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:44:27 crc kubenswrapper[4768]: I1203 16:44:27.585472 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Dec 03 16:44:28 crc kubenswrapper[4768]: I1203 16:44:28.600339 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 03 16:44:28 crc kubenswrapper[4768]: I1203 16:44:28.600677 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 03 16:44:29 crc kubenswrapper[4768]: I1203 16:44:29.683885 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.221:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:44:29 crc kubenswrapper[4768]: I1203 16:44:29.683953 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.221:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.404662 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.520561 4768 generic.go:334] "Generic (PLEG): container finished" podID="0264e152-fc1f-4658-bb13-cbbc8c713b73" containerID="1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8" exitCode=137
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.520623 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0264e152-fc1f-4658-bb13-cbbc8c713b73","Type":"ContainerDied","Data":"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"}
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.520667 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"0264e152-fc1f-4658-bb13-cbbc8c713b73","Type":"ContainerDied","Data":"aba584784a56af786950d94d4f3513800cc65dfa740e7f0adba688b5106ceb8d"}
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.520648 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.520685 4768 scope.go:117] "RemoveContainer" containerID="1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.544672 4768 scope.go:117] "RemoveContainer" containerID="1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"
Dec 03 16:44:36 crc kubenswrapper[4768]: E1203 16:44:36.545219 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8\": container with ID starting with 1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8 not found: ID does not exist" containerID="1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.545402 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8"} err="failed to get container status \"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8\": rpc error: code = NotFound desc = could not find container \"1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8\": container with ID starting with 1ef954a62e959b001844d3a4f6fdbae4039be14016f9a1d95a8a9045da4d85b8 not found: ID does not exist"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.556551 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.556914 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.557238 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") pod \"0264e152-fc1f-4658-bb13-cbbc8c713b73\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") "
\"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") pod \"0264e152-fc1f-4658-bb13-cbbc8c713b73\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.557444 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle\") pod \"0264e152-fc1f-4658-bb13-cbbc8c713b73\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.557523 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h495v\" (UniqueName: \"kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v\") pod \"0264e152-fc1f-4658-bb13-cbbc8c713b73\" (UID: \"0264e152-fc1f-4658-bb13-cbbc8c713b73\") " Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.571721 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v" (OuterVolumeSpecName: "kube-api-access-h495v") pod "0264e152-fc1f-4658-bb13-cbbc8c713b73" (UID: "0264e152-fc1f-4658-bb13-cbbc8c713b73"). InnerVolumeSpecName "kube-api-access-h495v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.571932 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.601445 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data" (OuterVolumeSpecName: "config-data") pod "0264e152-fc1f-4658-bb13-cbbc8c713b73" (UID: "0264e152-fc1f-4658-bb13-cbbc8c713b73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.610778 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0264e152-fc1f-4658-bb13-cbbc8c713b73" (UID: "0264e152-fc1f-4658-bb13-cbbc8c713b73"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.661064 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.661093 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h495v\" (UniqueName: \"kubernetes.io/projected/0264e152-fc1f-4658-bb13-cbbc8c713b73-kube-api-access-h495v\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.661103 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0264e152-fc1f-4658-bb13-cbbc8c713b73-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.884580 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.906046 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.922236 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:36 crc kubenswrapper[4768]: E1203 16:44:36.922850 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="extract-utilities" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.922878 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="extract-utilities" Dec 03 16:44:36 crc kubenswrapper[4768]: E1203 16:44:36.922895 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0264e152-fc1f-4658-bb13-cbbc8c713b73" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.922904 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0264e152-fc1f-4658-bb13-cbbc8c713b73" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:44:36 crc kubenswrapper[4768]: E1203 16:44:36.922917 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="extract-content" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.922925 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="extract-content" Dec 03 16:44:36 crc kubenswrapper[4768]: E1203 16:44:36.922960 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="registry-server" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.922968 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="registry-server" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.923220 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0264e152-fc1f-4658-bb13-cbbc8c713b73" containerName="nova-cell1-novncproxy-novncproxy" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.923253 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="79dd49d2-682f-4df3-ba0b-7f120289b0aa" containerName="registry-server" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.924329 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.929453 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.930981 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.931011 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 03 16:44:36 crc kubenswrapper[4768]: I1203 16:44:36.935510 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.069584 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvxl7\" (UniqueName: \"kubernetes.io/projected/a8801603-67b7-4a04-b05e-de0651787247-kube-api-access-pvxl7\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.069658 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.069773 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.069920 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.069970 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.171616 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvxl7\" (UniqueName: \"kubernetes.io/projected/a8801603-67b7-4a04-b05e-de0651787247-kube-api-access-pvxl7\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.171666 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.171757 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.171829 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.171857 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.176106 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.176583 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.176898 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.177512 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8801603-67b7-4a04-b05e-de0651787247-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.193648 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvxl7\" (UniqueName: \"kubernetes.io/projected/a8801603-67b7-4a04-b05e-de0651787247-kube-api-access-pvxl7\") pod \"nova-cell1-novncproxy-0\" (UID: \"a8801603-67b7-4a04-b05e-de0651787247\") " pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.248704 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.545297 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0264e152-fc1f-4658-bb13-cbbc8c713b73" path="/var/lib/kubelet/pods/0264e152-fc1f-4658-bb13-cbbc8c713b73/volumes" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.546303 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 16:44:37 crc kubenswrapper[4768]: I1203 16:44:37.740284 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 03 16:44:37 crc kubenswrapper[4768]: W1203 16:44:37.749828 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8801603_67b7_4a04_b05e_de0651787247.slice/crio-58eb712bcda219d9c6ab8cd742b30360ef810fcc5f57882ec6f0c8997d0f4aa0 WatchSource:0}: Error finding container 58eb712bcda219d9c6ab8cd742b30360ef810fcc5f57882ec6f0c8997d0f4aa0: Status 404 returned error can't find the container with id 58eb712bcda219d9c6ab8cd742b30360ef810fcc5f57882ec6f0c8997d0f4aa0 Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.565951 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8801603-67b7-4a04-b05e-de0651787247","Type":"ContainerStarted","Data":"d9d4a80c2d41851285d09d2d7058b24b37cd809d677fe9d055b492808c790385"} Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.566446 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a8801603-67b7-4a04-b05e-de0651787247","Type":"ContainerStarted","Data":"58eb712bcda219d9c6ab8cd742b30360ef810fcc5f57882ec6f0c8997d0f4aa0"} Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.603759 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.604255 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.605579 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.605557039 podStartE2EDuration="2.605557039s" podCreationTimestamp="2025-12-03 16:44:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:38.592956645 +0000 UTC m=+1575.512293068" watchObservedRunningTime="2025-12-03 16:44:38.605557039 +0000 UTC m=+1575.524893462" Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.607946 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:44:38 crc kubenswrapper[4768]: I1203 16:44:38.623765 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.577658 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.581562 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.800490 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"] Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.802402 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.839123 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"]
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.938890 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.938930 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.938981 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.939044 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.939068 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:39 crc kubenswrapper[4768]: I1203 16:44:39.939088 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv9d4\" (UniqueName: \"kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041507 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041568 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041727 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041814 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041843 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.041877 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv9d4\" (UniqueName: \"kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.043084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.043235 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.043351 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.043502 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.043854 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.066372 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv9d4\" (UniqueName: \"kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4\") pod \"dnsmasq-dns-5fd9b586ff-s9mgv\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") " pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.147466 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:40 crc kubenswrapper[4768]: I1203 16:44:40.667089 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"]
Dec 03 16:44:40 crc kubenswrapper[4768]: W1203 16:44:40.672081 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5efd08b0_6167_405c_9da0_121a9e3c3b55.slice/crio-bd1f041f6a0588651c4c3e4e1fb0a856245f5828149213d8782996aa0d4f048f WatchSource:0}: Error finding container bd1f041f6a0588651c4c3e4e1fb0a856245f5828149213d8782996aa0d4f048f: Status 404 returned error can't find the container with id bd1f041f6a0588651c4c3e4e1fb0a856245f5828149213d8782996aa0d4f048f
Dec 03 16:44:41 crc kubenswrapper[4768]: I1203 16:44:41.536791 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"
Dec 03 16:44:41 crc kubenswrapper[4768]: E1203 16:44:41.537447 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:44:41 crc kubenswrapper[4768]: I1203 16:44:41.607231 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerStarted","Data":"147ec30adeefebdacc3803387aaac37aa5e4ca8ad0d7b60f92db5ef60a5a9c13"}
Dec 03 16:44:41 crc kubenswrapper[4768]: I1203 16:44:41.607289 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerStarted","Data":"bd1f041f6a0588651c4c3e4e1fb0a856245f5828149213d8782996aa0d4f048f"}
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.249160 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.547674 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.547976 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-central-agent" containerID="cri-o://daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195" gracePeriod=30
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.548012 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="proxy-httpd" containerID="cri-o://2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a" gracePeriod=30
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.548086 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-notification-agent" containerID="cri-o://94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70" gracePeriod=30
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.548123 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="sg-core" containerID="cri-o://ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8" gracePeriod=30
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.562656 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.222:3000/\": EOF"
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.619348 4768 generic.go:334] "Generic (PLEG): container finished" podID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerID="147ec30adeefebdacc3803387aaac37aa5e4ca8ad0d7b60f92db5ef60a5a9c13" exitCode=0
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.619388 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerDied","Data":"147ec30adeefebdacc3803387aaac37aa5e4ca8ad0d7b60f92db5ef60a5a9c13"}
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.619413 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerStarted","Data":"ab85698c277b7b82ffa6d8351724a304c6f550993a10d7539404a7e4aeea57af"}
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.619887 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.643560 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" podStartSLOduration=3.643539998 podStartE2EDuration="3.643539998s" podCreationTimestamp="2025-12-03 16:44:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:42.637837822 +0000 UTC m=+1579.557174265" watchObservedRunningTime="2025-12-03 16:44:42.643539998 +0000 UTC m=+1579.562876421"
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.842615 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.843019 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-log" containerID="cri-o://053d729556746c14f91ff5d6925566b663f5b9f392c85e91115f16e53e9bf848" gracePeriod=30
Dec 03 16:44:42 crc kubenswrapper[4768]: I1203 16:44:42.846875 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-api" containerID="cri-o://0346c176dc90077c526ec55f48c5438df729d085e71182ec99566d908fd39b00" gracePeriod=30
Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632136 4768 generic.go:334] "Generic (PLEG): container finished" podID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerID="2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a" exitCode=0
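[editor's note] The "Killing container with a grace period ... gracePeriod=30" entries above are the SIGTERM-then-wait半 pattern; the exitCode=143 recorded just below for nova-api-log is 128+15, i.e. death by SIGTERM. A Unix-only stdlib sketch of those stop semantics, an illustration rather than the kubelet/CRI-O implementation:

```go
// graceful.go - sketch of "kill with a grace period": send SIGTERM,
// wait up to the grace period, then SIGKILL.
package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
	_ = cmd.Process.Signal(syscall.SIGTERM) // polite request first
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case err := <-done:
		return err // exited within the grace period (exit code 143 for SIGTERM)
	case <-time.After(grace):
		_ = cmd.Process.Kill() // grace expired: SIGKILL
		return <-done
	}
}

func main() {
	cmd := exec.Command("sleep", "300") // stand-in for a container process
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	fmt.Println("stopped:", stopWithGrace(cmd, 30*time.Second))
}
```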
containerID="2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a" exitCode=0 Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632163 4768 generic.go:334] "Generic (PLEG): container finished" podID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerID="ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8" exitCode=2 Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632172 4768 generic.go:334] "Generic (PLEG): container finished" podID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerID="daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195" exitCode=0 Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632208 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerDied","Data":"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"} Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632259 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerDied","Data":"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"} Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.632273 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerDied","Data":"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"} Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.633682 4768 generic.go:334] "Generic (PLEG): container finished" podID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerID="053d729556746c14f91ff5d6925566b663f5b9f392c85e91115f16e53e9bf848" exitCode=143 Dec 03 16:44:43 crc kubenswrapper[4768]: I1203 16:44:43.633759 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerDied","Data":"053d729556746c14f91ff5d6925566b663f5b9f392c85e91115f16e53e9bf848"} Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.661915 4768 generic.go:334] "Generic (PLEG): container finished" podID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerID="0346c176dc90077c526ec55f48c5438df729d085e71182ec99566d908fd39b00" exitCode=0 Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.662115 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerDied","Data":"0346c176dc90077c526ec55f48c5438df729d085e71182ec99566d908fd39b00"} Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.662371 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"4a61d901-34fa-4f3c-999d-5867aeaf8e3a","Type":"ContainerDied","Data":"6c0e0658f69f5c93dbbccaadb31e9b72fd544f5b2c38be3a2bbf9cb6e6478e2e"} Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.662386 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c0e0658f69f5c93dbbccaadb31e9b72fd544f5b2c38be3a2bbf9cb6e6478e2e" Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.752481 4768 util.go:48] "No ready sandbox for pod can be found. 
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.898694 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle\") pod \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") "
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.898843 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qlfw\" (UniqueName: \"kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw\") pod \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") "
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.899007 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs\") pod \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") "
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.899064 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data\") pod \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\" (UID: \"4a61d901-34fa-4f3c-999d-5867aeaf8e3a\") "
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.899357 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs" (OuterVolumeSpecName: "logs") pod "4a61d901-34fa-4f3c-999d-5867aeaf8e3a" (UID: "4a61d901-34fa-4f3c-999d-5867aeaf8e3a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.899721 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-logs\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.908774 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw" (OuterVolumeSpecName: "kube-api-access-2qlfw") pod "4a61d901-34fa-4f3c-999d-5867aeaf8e3a" (UID: "4a61d901-34fa-4f3c-999d-5867aeaf8e3a"). InnerVolumeSpecName "kube-api-access-2qlfw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.932493 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data" (OuterVolumeSpecName: "config-data") pod "4a61d901-34fa-4f3c-999d-5867aeaf8e3a" (UID: "4a61d901-34fa-4f3c-999d-5867aeaf8e3a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:46 crc kubenswrapper[4768]: I1203 16:44:46.937360 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a61d901-34fa-4f3c-999d-5867aeaf8e3a" (UID: "4a61d901-34fa-4f3c-999d-5867aeaf8e3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.001553 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.001580 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.001602 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qlfw\" (UniqueName: \"kubernetes.io/projected/4a61d901-34fa-4f3c-999d-5867aeaf8e3a-kube-api-access-2qlfw\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.249003 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.278500 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.406665 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511509 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511709 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511739 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511782 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-484gd\" (UniqueName: \"kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511826 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511879 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.511976 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.512036 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd\") pod \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\" (UID: \"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0\") "
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.512966 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.513315 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.516138 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd" (OuterVolumeSpecName: "kube-api-access-484gd") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "kube-api-access-484gd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.516567 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts" (OuterVolumeSpecName: "scripts") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.540824 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.591494 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.616426 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.616749 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.616839 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-484gd\" (UniqueName: \"kubernetes.io/projected/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-kube-api-access-484gd\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.616922 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.616998 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.617101 4768 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.619774 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.672552 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data" (OuterVolumeSpecName: "config-data") pod "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" (UID: "0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676357 4768 generic.go:334] "Generic (PLEG): container finished" podID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerID="94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70" exitCode=0
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676426 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676497 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerDied","Data":"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"}
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676578 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0","Type":"ContainerDied","Data":"e82244a04c2472a7fe35aeb904bd36681bd442cbca647baef013d58e50f3db2f"}
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676618 4768 scope.go:117] "RemoveContainer" containerID="2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.676894 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.702463 4768 scope.go:117] "RemoveContainer" containerID="ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.703793 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.718671 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.718706 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.720657 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.731268 4768 scope.go:117] "RemoveContainer" containerID="94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.747321 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.761797 4768 scope.go:117] "RemoveContainer" containerID="daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.787669 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788201 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-central-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.788225 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-central-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788236 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="proxy-httpd"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.788245 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="proxy-httpd"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788264 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-notification-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.788272 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-notification-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788305 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-log"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.788312 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-log"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788330 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-api"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.788336 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-api"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.788362 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="sg-core"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.789698 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="sg-core"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790059 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-central-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790077 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-api"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790101 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" containerName="nova-api-log"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790110 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="proxy-httpd"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790123 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="ceilometer-notification-agent"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.790133 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" containerName="sg-core"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.791431 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.793911 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.794194 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.804522 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.806820 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.820564 4768 scope.go:117] "RemoveContainer" containerID="2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.821823 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.824772 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a\": container with ID starting with 2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a not found: ID does not exist" containerID="2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.824818 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a"} err="failed to get container status \"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a\": rpc error: code = NotFound desc = could not find container \"2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a\": container with ID starting with 2ddae026cb277ade091f82e0fd266697b1fe1625de7f1405395165b8826fea7a not found: ID does not exist"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.824851 4768 scope.go:117] "RemoveContainer" containerID="ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.832775 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8\": container with ID starting with ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8 not found: ID does not exist" containerID="ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.832830 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8"} err="failed to get container status \"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8\": rpc error: code = NotFound desc = could not find container \"ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8\": container with ID starting with ed44d2ccf174e2d31adb39ec7529297de3f80b4b42c630550ff29656f20365b8 not found: ID does not exist"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.832862 4768 scope.go:117] "RemoveContainer" containerID="94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.836195 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.839471 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70\": container with ID starting with 94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70 not found: ID does not exist" containerID="94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.839519 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70"} err="failed to get container status \"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70\": rpc error: code = NotFound desc = could not find container \"94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70\": container with ID starting with 94b1d220a5482a099da37d72f34370a6b8a12d669fc1ad2e5c5f20efabde2a70 not found: ID does not exist"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.839552 4768 scope.go:117] "RemoveContainer" containerID="daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"
Dec 03 16:44:47 crc kubenswrapper[4768]: E1203 16:44:47.845810 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195\": container with ID starting with daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195 not found: ID does not exist" containerID="daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.845863 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195"} err="failed to get container status \"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195\": rpc error: code = NotFound desc = could not find container \"daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195\": container with ID starting with daddda49b4ea58c9d12ad9ab36acf56724bce5f9ac12afd4039378dd28fe7195 not found: ID does not exist"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.850786 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.861178 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
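[editor's note] The paired "ContainerStatus from runtime service failed ... code = NotFound" / "DeleteContainer returned error" entries above show the kubelet querying the gRPC-based CRI runtime about containers it has already removed and tolerating NotFound. A sketch of that idempotent-cleanup pattern; ignoreNotFound is our illustrative helper, not a kubelet function:

```go
// notfound.go - treat codes.NotFound from a gRPC runtime call as
// "nothing left to do" rather than a cleanup failure.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func ignoreNotFound(err error) error {
	if status.Code(err) == codes.NotFound {
		return nil // already gone: deletion is idempotent
	}
	return err
}

func main() {
	// Simulate the runtime's reply from the log:
	// "rpc error: code = NotFound desc = could not find container ..."
	err := status.Error(codes.NotFound, "could not find container \"2ddae026...\"")
	if ignoreNotFound(err) == nil {
		fmt.Println("container already removed; continuing cleanup")
	}
}
```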
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.868292 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.868529 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.868733 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.880150 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924590 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5tkv\" (UniqueName: \"kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924683 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924713 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924753 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924777 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924795 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc65w\" (UniqueName: \"kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924825 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924860 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924888 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924912 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924962 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.924991 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.925010 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.925036 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.934113 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-fcg7d"]
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.935544 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.937631 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.939205 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts"
Dec 03 16:44:47 crc kubenswrapper[4768]: I1203 16:44:47.961857 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-fcg7d"]
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027103 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027151 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027177 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc65w\" (UniqueName: \"kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027213 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027253 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027272 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027301 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027324 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.027469 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.028327 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.028385 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.029482 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.029538 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030203 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030243 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030321 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030346 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5tkv\" (UniqueName: \"kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030393 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqcwf\" (UniqueName: \"kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030581 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.030654 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.031270 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.031346 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.031695 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.032130 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.032210 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.034954 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.035140 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.035567 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.035591 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.042118 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.042314 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc65w\" (UniqueName: \"kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w\") pod \"nova-api-0\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.050424 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5tkv\" (UniqueName: \"kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv\") pod \"ceilometer-0\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.121941 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.132241 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.132309 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.132364 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqcwf\" (UniqueName: \"kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.132496 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.135923 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.135954 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.135961 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.155461 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqcwf\" (UniqueName: \"kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf\") pod \"nova-cell1-cell-mapping-fcg7d\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.184046 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.257006 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fcg7d"
Dec 03 16:44:48 crc kubenswrapper[4768]: W1203 16:44:48.725111 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54da6841_6f5d_4f1f_b960_a8b3c86a2b36.slice/crio-036b4d2d411b1f5b32bc5352d6b129a61ff62aaeb7e16137c7006a14405beed4 WatchSource:0}: Error finding container 036b4d2d411b1f5b32bc5352d6b129a61ff62aaeb7e16137c7006a14405beed4: Status 404 returned error can't find the container with id 036b4d2d411b1f5b32bc5352d6b129a61ff62aaeb7e16137c7006a14405beed4
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.735154 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.847417 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:44:48 crc kubenswrapper[4768]: W1203 16:44:48.850776 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb60fbea6_faba_4e4d_a912_83a79c52b6b5.slice/crio-a0b00ecfb27d59266cec4158002634b20eab647b676192bd64fed76edfc01c51 WatchSource:0}: Error finding container a0b00ecfb27d59266cec4158002634b20eab647b676192bd64fed76edfc01c51: Status 404 returned error can't find the container with id a0b00ecfb27d59266cec4158002634b20eab647b676192bd64fed76edfc01c51
Dec 03 16:44:48 crc kubenswrapper[4768]: I1203 16:44:48.959021 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-fcg7d"]
Dec 03 16:44:48 crc kubenswrapper[4768]: W1203 16:44:48.963151 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26b7ee95_c386_4920_9c4d_9d74ae5655c4.slice/crio-08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5 WatchSource:0}: Error finding container 08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5: Status 404 returned error can't find the container with id 08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.546143 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0" path="/var/lib/kubelet/pods/0a2dbb6c-b9a1-49e3-883e-404aca7ccfe0/volumes"
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.547289 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a61d901-34fa-4f3c-999d-5867aeaf8e3a" path="/var/lib/kubelet/pods/4a61d901-34fa-4f3c-999d-5867aeaf8e3a/volumes"
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.713237 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerStarted","Data":"420ba9e320cefd7009cbe6ae37dab2dab281ce81e75fd7307a238b722a39e764"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.713281 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerStarted","Data":"996ef4f57e9805fd3994cca914c8afef7edf769f495a63b0f998abed994f5107"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.713296 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerStarted","Data":"036b4d2d411b1f5b32bc5352d6b129a61ff62aaeb7e16137c7006a14405beed4"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.717585 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fcg7d" event={"ID":"26b7ee95-c386-4920-9c4d-9d74ae5655c4","Type":"ContainerStarted","Data":"f894cbdb416cb160d9cd0622470ce2e8df59206fb0b6113c90ec4ea22581cc3c"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.717661 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fcg7d" event={"ID":"26b7ee95-c386-4920-9c4d-9d74ae5655c4","Type":"ContainerStarted","Data":"08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.725534 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerStarted","Data":"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.725585 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerStarted","Data":"a0b00ecfb27d59266cec4158002634b20eab647b676192bd64fed76edfc01c51"}
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.749705 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.749683781 podStartE2EDuration="2.749683781s" podCreationTimestamp="2025-12-03 16:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:49.736523013 +0000 UTC m=+1586.655859436" watchObservedRunningTime="2025-12-03 16:44:49.749683781 +0000 UTC m=+1586.669020214"
Dec 03 16:44:49 crc kubenswrapper[4768]: I1203 16:44:49.759237 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-fcg7d" podStartSLOduration=2.759214105 podStartE2EDuration="2.759214105s" podCreationTimestamp="2025-12-03 16:44:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:44:49.759205945 +0000 UTC m=+1586.678542368" watchObservedRunningTime="2025-12-03 16:44:49.759214105 +0000 UTC m=+1586.678550528"
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.149225 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.224921 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"]
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.225182 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="dnsmasq-dns" containerID="cri-o://e231f5a86f32d575c7f78cc7bd35b9a55c9d2f18c222886dbdcdea4e81e97846" gracePeriod=10
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.737758 4768 generic.go:334] "Generic (PLEG): container finished" podID="62788624-724c-491a-9ed8-ca208a25a98b" containerID="e231f5a86f32d575c7f78cc7bd35b9a55c9d2f18c222886dbdcdea4e81e97846" exitCode=0
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.738889 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerDied","Data":"e231f5a86f32d575c7f78cc7bd35b9a55c9d2f18c222886dbdcdea4e81e97846"}
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.753373 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerStarted","Data":"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555"}
Dec 03 16:44:50 crc kubenswrapper[4768]: I1203 16:44:50.951253 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zcp5f"
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019093 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-swift-storage-0\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019288 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019391 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-775xs\" (UniqueName: \"kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019434 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019507 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.019647 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc\") pod \"62788624-724c-491a-9ed8-ca208a25a98b\" (UID: \"62788624-724c-491a-9ed8-ca208a25a98b\") "
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.023418 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs" (OuterVolumeSpecName: "kube-api-access-775xs") pod "62788624-724c-491a-9ed8-ca208a25a98b" (UID: "62788624-724c-491a-9ed8-ca208a25a98b"). InnerVolumeSpecName "kube-api-access-775xs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.095732 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "62788624-724c-491a-9ed8-ca208a25a98b" (UID: "62788624-724c-491a-9ed8-ca208a25a98b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.095738 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "62788624-724c-491a-9ed8-ca208a25a98b" (UID: "62788624-724c-491a-9ed8-ca208a25a98b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.104889 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config" (OuterVolumeSpecName: "config") pod "62788624-724c-491a-9ed8-ca208a25a98b" (UID: "62788624-724c-491a-9ed8-ca208a25a98b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.107284 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "62788624-724c-491a-9ed8-ca208a25a98b" (UID: "62788624-724c-491a-9ed8-ca208a25a98b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.122205 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-775xs\" (UniqueName: \"kubernetes.io/projected/62788624-724c-491a-9ed8-ca208a25a98b-kube-api-access-775xs\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.122244 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.122257 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.122271 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.122282 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.224129 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/62788624-724c-491a-9ed8-ca208a25a98b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.586645 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:44:51 crc kubenswrapper[4768]: E1203 16:44:51.587426 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="dnsmasq-dns" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.587444 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="dnsmasq-dns" Dec 03 16:44:51 crc kubenswrapper[4768]: E1203 16:44:51.587463 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="init" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.587469 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="init" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.587760 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="62788624-724c-491a-9ed8-ca208a25a98b" containerName="dnsmasq-dns" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.589238 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.598879 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.635194 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.635398 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfwmf\" (UniqueName: \"kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.635432 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.738085 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfwmf\" (UniqueName: \"kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.738150 
4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.738367 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.738840 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.738925 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.756194 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfwmf\" (UniqueName: \"kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf\") pod \"certified-operators-m6s2l\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.763745 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.764084 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78cd565959-zcp5f" event={"ID":"62788624-724c-491a-9ed8-ca208a25a98b","Type":"ContainerDied","Data":"83fad0930bc08c3dd7801d8ddeb085ce87ff04fab4d7cb50938fc670a5fcdfb5"} Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.764131 4768 scope.go:117] "RemoveContainer" containerID="e231f5a86f32d575c7f78cc7bd35b9a55c9d2f18c222886dbdcdea4e81e97846" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.778574 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerStarted","Data":"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a"} Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.827713 4768 scope.go:117] "RemoveContainer" containerID="4372aae51eb0c5342ee4dc9620536607242f4dfa65a9004475855b83f4e49c9d" Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.843915 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"] Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.859256 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78cd565959-zcp5f"] Dec 03 16:44:51 crc kubenswrapper[4768]: I1203 16:44:51.927147 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:44:56 crc kubenswrapper[4768]: W1203 16:44:53.451058 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04dced2d_ba9d_4b3c_8b45_c7c48257f7a9.slice/crio-48ca2c36d0ca28aa170a143d1fbb76d7558a7fa498241587af7df904f877d404 WatchSource:0}: Error finding container 48ca2c36d0ca28aa170a143d1fbb76d7558a7fa498241587af7df904f877d404: Status 404 returned error can't find the container with id 48ca2c36d0ca28aa170a143d1fbb76d7558a7fa498241587af7df904f877d404 Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.471854 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.552833 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62788624-724c-491a-9ed8-ca208a25a98b" path="/var/lib/kubelet/pods/62788624-724c-491a-9ed8-ca208a25a98b/volumes" Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.808318 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerStarted","Data":"48ca2c36d0ca28aa170a143d1fbb76d7558a7fa498241587af7df904f877d404"} Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.811275 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerStarted","Data":"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7"} Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.811485 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:53.838288 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.577582203 podStartE2EDuration="6.838263578s" podCreationTimestamp="2025-12-03 16:44:47 +0000 UTC" firstStartedPulling="2025-12-03 16:44:48.853270877 +0000 UTC m=+1585.772607300" lastFinishedPulling="2025-12-03 16:44:53.113952252 +0000 UTC m=+1590.033288675" observedRunningTime="2025-12-03 16:44:53.828176919 +0000 UTC m=+1590.747513362" watchObservedRunningTime="2025-12-03 16:44:53.838263578 +0000 UTC m=+1590.757600001" Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:55.532784 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:44:56 crc kubenswrapper[4768]: E1203 16:44:55.533731 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:56.853859 4768 generic.go:334] "Generic (PLEG): container finished" podID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerID="308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9" exitCode=0 Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:56.854027 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" 
event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerDied","Data":"308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9"} Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:56.862537 4768 generic.go:334] "Generic (PLEG): container finished" podID="26b7ee95-c386-4920-9c4d-9d74ae5655c4" containerID="f894cbdb416cb160d9cd0622470ce2e8df59206fb0b6113c90ec4ea22581cc3c" exitCode=0 Dec 03 16:44:56 crc kubenswrapper[4768]: I1203 16:44:56.862827 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fcg7d" event={"ID":"26b7ee95-c386-4920-9c4d-9d74ae5655c4","Type":"ContainerDied","Data":"f894cbdb416cb160d9cd0622470ce2e8df59206fb0b6113c90ec4ea22581cc3c"} Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.122672 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.123247 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.367512 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fcg7d" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.490383 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data\") pod \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.490501 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts\") pod \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.490726 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqcwf\" (UniqueName: \"kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf\") pod \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.490776 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle\") pod \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\" (UID: \"26b7ee95-c386-4920-9c4d-9d74ae5655c4\") " Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.497324 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts" (OuterVolumeSpecName: "scripts") pod "26b7ee95-c386-4920-9c4d-9d74ae5655c4" (UID: "26b7ee95-c386-4920-9c4d-9d74ae5655c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.511928 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf" (OuterVolumeSpecName: "kube-api-access-tqcwf") pod "26b7ee95-c386-4920-9c4d-9d74ae5655c4" (UID: "26b7ee95-c386-4920-9c4d-9d74ae5655c4"). InnerVolumeSpecName "kube-api-access-tqcwf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.534680 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26b7ee95-c386-4920-9c4d-9d74ae5655c4" (UID: "26b7ee95-c386-4920-9c4d-9d74ae5655c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.535532 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data" (OuterVolumeSpecName: "config-data") pod "26b7ee95-c386-4920-9c4d-9d74ae5655c4" (UID: "26b7ee95-c386-4920-9c4d-9d74ae5655c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.593699 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqcwf\" (UniqueName: \"kubernetes.io/projected/26b7ee95-c386-4920-9c4d-9d74ae5655c4-kube-api-access-tqcwf\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.593741 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.593755 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.593766 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/26b7ee95-c386-4920-9c4d-9d74ae5655c4-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.962808 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerStarted","Data":"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81"} Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.967118 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fcg7d" event={"ID":"26b7ee95-c386-4920-9c4d-9d74ae5655c4","Type":"ContainerDied","Data":"08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5"} Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.967162 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08f92454889028138000f43ce4a279ff95caa6253567a26ae452766fbda65fe5" Dec 03 16:44:58 crc kubenswrapper[4768]: I1203 16:44:58.967238 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fcg7d" Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.106013 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.106289 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-log" containerID="cri-o://996ef4f57e9805fd3994cca914c8afef7edf769f495a63b0f998abed994f5107" gracePeriod=30 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.107218 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-api" containerID="cri-o://420ba9e320cefd7009cbe6ae37dab2dab281ce81e75fd7307a238b722a39e764" gracePeriod=30 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.116618 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.225:8774/\": EOF" Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.119083 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.225:8774/\": EOF" Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.126016 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.126217 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" containerID="cri-o://3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" gracePeriod=30 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.194334 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.195117 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log" containerID="cri-o://3cc120048fc591b79d31e15701a73dcf3c911299b6e9987361e4017ee66aba36" gracePeriod=30 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.195360 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" containerID="cri-o://02a8d88cd74635ce325de2b36f603da708a368b9eed9b031fff02f70670663ad" gracePeriod=30 Dec 03 16:44:59 crc kubenswrapper[4768]: E1203 16:44:59.426960 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:44:59 crc kubenswrapper[4768]: E1203 16:44:59.428682 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:44:59 crc kubenswrapper[4768]: E1203 16:44:59.433421 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:44:59 crc kubenswrapper[4768]: E1203 16:44:59.433505 4768 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.980631 4768 generic.go:334] "Generic (PLEG): container finished" podID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerID="3cc120048fc591b79d31e15701a73dcf3c911299b6e9987361e4017ee66aba36" exitCode=143 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.980721 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerDied","Data":"3cc120048fc591b79d31e15701a73dcf3c911299b6e9987361e4017ee66aba36"} Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.982851 4768 generic.go:334] "Generic (PLEG): container finished" podID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerID="996ef4f57e9805fd3994cca914c8afef7edf769f495a63b0f998abed994f5107" exitCode=143 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.982979 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerDied","Data":"996ef4f57e9805fd3994cca914c8afef7edf769f495a63b0f998abed994f5107"} Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.986449 4768 generic.go:334] "Generic (PLEG): container finished" podID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerID="a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81" exitCode=0 Dec 03 16:44:59 crc kubenswrapper[4768]: I1203 16:44:59.986487 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerDied","Data":"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81"} Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.147238 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"] Dec 03 16:45:00 crc kubenswrapper[4768]: E1203 16:45:00.147824 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26b7ee95-c386-4920-9c4d-9d74ae5655c4" containerName="nova-manage" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.147841 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="26b7ee95-c386-4920-9c4d-9d74ae5655c4" containerName="nova-manage" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.148051 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="26b7ee95-c386-4920-9c4d-9d74ae5655c4" containerName="nova-manage" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.148874 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.151265 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.154504 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.157414 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"] Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.227506 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8kw9\" (UniqueName: \"kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.227839 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.227937 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.329950 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8kw9\" (UniqueName: \"kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.330096 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.330145 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:00 crc kubenswrapper[4768]: I1203 16:45:00.331129 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume\") pod 
\"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:01 crc kubenswrapper[4768]: I1203 16:45:01.159953 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Liveness probe status=failure output="" start-of-body= Dec 03 16:45:01 crc kubenswrapper[4768]: I1203 16:45:01.201497 4768 patch_prober.go:28] interesting pod/downloads-7954f5f757-ncgcs container/download-server namespace/openshift-console: Readiness probe status=failure output="" start-of-body= Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.170399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8kw9\" (UniqueName: \"kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.170641 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume\") pod \"collect-profiles-29413005-5bsgj\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.204442 4768 trace.go:236] Trace[833305249]: "Calculate volume metrics of mysql-db for pod openstack/openstack-cell1-galera-0" (03-Dec-2025 16:44:59.994) (total time: 2210ms): Dec 03 16:45:02 crc kubenswrapper[4768]: Trace[833305249]: [2.210064583s] [2.210064583s] END Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.212029 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/frr-k8s-k7czv" podUID="e9282b00-b418-4626-9620-8ca6252433b2" containerName="frr" probeResult="failure" output="Get \"http://127.0.0.1:7573/livez\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.267243 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.267243 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"
Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.609030 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.219:8775/\": read tcp 10.217.0.2:60540->10.217.0.219:8775: read: connection reset by peer"
Dec 03 16:45:02 crc kubenswrapper[4768]: I1203 16:45:02.609140 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.219:8775/\": read tcp 10.217.0.2:60544->10.217.0.219:8775: read: connection reset by peer"
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.041156 4768 generic.go:334] "Generic (PLEG): container finished" podID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerID="02a8d88cd74635ce325de2b36f603da708a368b9eed9b031fff02f70670663ad" exitCode=0
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.041406 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerDied","Data":"02a8d88cd74635ce325de2b36f603da708a368b9eed9b031fff02f70670663ad"}
Dec 03 16:45:03 crc kubenswrapper[4768]: W1203 16:45:03.160003 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2862f4a4_cd54_45e3_aa8b_322c0b39830d.slice/crio-c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5 WatchSource:0}: Error finding container c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5: Status 404 returned error can't find the container with id c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.160546 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"]
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.766324 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.920221 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle\") pod \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") "
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.920366 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nrqm\" (UniqueName: \"kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm\") pod \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") "
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.921101 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs\") pod \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") "
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.921218 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data\") pod \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") "
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.921276 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs\") pod \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\" (UID: \"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8\") "
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.922174 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs" (OuterVolumeSpecName: "logs") pod "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" (UID: "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.928851 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm" (OuterVolumeSpecName: "kube-api-access-4nrqm") pod "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" (UID: "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8"). InnerVolumeSpecName "kube-api-access-4nrqm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:45:03 crc kubenswrapper[4768]: I1203 16:45:03.978753 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data" (OuterVolumeSpecName: "config-data") pod "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" (UID: "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.024164 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nrqm\" (UniqueName: \"kubernetes.io/projected/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-kube-api-access-4nrqm\") on node \"crc\" DevicePath \"\""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.024210 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.024222 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-logs\") on node \"crc\" DevicePath \"\""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.047762 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" (UID: "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.072815 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" (UID: "7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.074854 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8","Type":"ContainerDied","Data":"a006794678788ac65bb82297281fe7103299980d47a14cd0ba5fbe7d2e331175"}
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.074903 4768 scope.go:117] "RemoveContainer" containerID="02a8d88cd74635ce325de2b36f603da708a368b9eed9b031fff02f70670663ad"
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.074864 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.078109 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerStarted","Data":"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8"}
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.080757 4768 generic.go:334] "Generic (PLEG): container finished" podID="2862f4a4-cd54-45e3-aa8b-322c0b39830d" containerID="d9c75baff1183188786cbaa07ab8ace2077d58a7611294b6abbe5dc49a6b0ff6" exitCode=0
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.080970 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" event={"ID":"2862f4a4-cd54-45e3-aa8b-322c0b39830d","Type":"ContainerDied","Data":"d9c75baff1183188786cbaa07ab8ace2077d58a7611294b6abbe5dc49a6b0ff6"}
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.081086 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" event={"ID":"2862f4a4-cd54-45e3-aa8b-322c0b39830d","Type":"ContainerStarted","Data":"c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5"}
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.097943 4768 scope.go:117] "RemoveContainer" containerID="3cc120048fc591b79d31e15701a73dcf3c911299b6e9987361e4017ee66aba36"
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.125560 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.125624 4768 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.129776 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m6s2l" podStartSLOduration=7.046884629 podStartE2EDuration="13.129758883s" podCreationTimestamp="2025-12-03 16:44:51 +0000 UTC" firstStartedPulling="2025-12-03 16:44:56.857171669 +0000 UTC m=+1593.776508092" lastFinishedPulling="2025-12-03 16:45:02.940045913 +0000 UTC m=+1599.859382346" observedRunningTime="2025-12-03 16:45:04.125966356 +0000 UTC m=+1601.045302779" watchObservedRunningTime="2025-12-03 16:45:04.129758883 +0000 UTC m=+1601.049095306"
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.159051 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.179331 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.203760 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 03 16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.204232 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log"
Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.204252 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log"
Dec 03
16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.204275 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.204281 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.204519 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-metadata" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.204536 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" containerName="nova-metadata-log" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.205675 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.212089 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.212312 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.243870 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.329504 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.329589 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-config-data\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.329653 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.329687 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-logs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.329729 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9s4n\" (UniqueName: \"kubernetes.io/projected/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-kube-api-access-f9s4n\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.428747 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = 
NotFound desc = container is not created or running: checking if PID of 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 is running failed: container process not found" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.429310 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 is running failed: container process not found" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.429543 4768 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 is running failed: container process not found" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 03 16:45:04 crc kubenswrapper[4768]: E1203 16:45:04.429573 4768 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.430871 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9s4n\" (UniqueName: \"kubernetes.io/projected/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-kube-api-access-f9s4n\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.431038 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.431082 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-config-data\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.431127 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.431167 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-logs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 
16:45:04.431512 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-logs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.437742 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.447241 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.449673 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-config-data\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.449990 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9s4n\" (UniqueName: \"kubernetes.io/projected/4f7cf11c-aca1-42f7-a9f4-e9c7c941269b-kube-api-access-f9s4n\") pod \"nova-metadata-0\" (UID: \"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b\") " pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.649457 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.741211 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.808267 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle\") pod \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.810262 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bmpx\" (UniqueName: \"kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx\") pod \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.814479 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data\") pod \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\" (UID: \"97af800a-a6f3-4818-bfc9-c27e0bd25e44\") " Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.818378 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx" (OuterVolumeSpecName: "kube-api-access-5bmpx") pod "97af800a-a6f3-4818-bfc9-c27e0bd25e44" (UID: "97af800a-a6f3-4818-bfc9-c27e0bd25e44"). 
InnerVolumeSpecName "kube-api-access-5bmpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.846675 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data" (OuterVolumeSpecName: "config-data") pod "97af800a-a6f3-4818-bfc9-c27e0bd25e44" (UID: "97af800a-a6f3-4818-bfc9-c27e0bd25e44"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.871484 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97af800a-a6f3-4818-bfc9-c27e0bd25e44" (UID: "97af800a-a6f3-4818-bfc9-c27e0bd25e44"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.917089 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.917121 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97af800a-a6f3-4818-bfc9-c27e0bd25e44-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:04 crc kubenswrapper[4768]: I1203 16:45:04.917132 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bmpx\" (UniqueName: \"kubernetes.io/projected/97af800a-a6f3-4818-bfc9-c27e0bd25e44-kube-api-access-5bmpx\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.095563 4768 generic.go:334] "Generic (PLEG): container finished" podID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" exitCode=0 Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.095640 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"97af800a-a6f3-4818-bfc9-c27e0bd25e44","Type":"ContainerDied","Data":"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474"} Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.095660 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.095701 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"97af800a-a6f3-4818-bfc9-c27e0bd25e44","Type":"ContainerDied","Data":"31b36c7e102e406bd8ba8fc8065d084804517b4ec3bba10a3e30b02da72892f2"} Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.095721 4768 scope.go:117] "RemoveContainer" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.134453 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.150544 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.159031 4768 scope.go:117] "RemoveContainer" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" Dec 03 16:45:05 crc kubenswrapper[4768]: E1203 16:45:05.159790 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474\": container with ID starting with 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 not found: ID does not exist" containerID="3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.159831 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474"} err="failed to get container status \"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474\": rpc error: code = NotFound desc = could not find container \"3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474\": container with ID starting with 3b42f2e6e16a3742103faad59a12b5b4d98ab4967fed41c342e2b16bcb393474 not found: ID does not exist" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.178119 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:45:05 crc kubenswrapper[4768]: E1203 16:45:05.178691 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.178709 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.178906 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" containerName="nova-scheduler-scheduler" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.179765 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.182477 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.198760 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.206717 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.223199 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-config-data\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.223734 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.223888 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd8mr\" (UniqueName: \"kubernetes.io/projected/31a17195-ba31-4233-b087-f31d38ff03a7-kube-api-access-hd8mr\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.325369 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd8mr\" (UniqueName: \"kubernetes.io/projected/31a17195-ba31-4233-b087-f31d38ff03a7-kube-api-access-hd8mr\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.325411 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-config-data\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.325541 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.331135 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.331870 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31a17195-ba31-4233-b087-f31d38ff03a7-config-data\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: 
I1203 16:45:05.349704 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd8mr\" (UniqueName: \"kubernetes.io/projected/31a17195-ba31-4233-b087-f31d38ff03a7-kube-api-access-hd8mr\") pod \"nova-scheduler-0\" (UID: \"31a17195-ba31-4233-b087-f31d38ff03a7\") " pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.544053 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8" path="/var/lib/kubelet/pods/7d6e9bf9-5f7b-4ca3-a1d8-83ecebd939d8/volumes" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.544888 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97af800a-a6f3-4818-bfc9-c27e0bd25e44" path="/var/lib/kubelet/pods/97af800a-a6f3-4818-bfc9-c27e0bd25e44/volumes" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.550013 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.606055 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.630635 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8kw9\" (UniqueName: \"kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9\") pod \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.630953 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume\") pod \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.631064 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume\") pod \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\" (UID: \"2862f4a4-cd54-45e3-aa8b-322c0b39830d\") " Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.633589 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume" (OuterVolumeSpecName: "config-volume") pod "2862f4a4-cd54-45e3-aa8b-322c0b39830d" (UID: "2862f4a4-cd54-45e3-aa8b-322c0b39830d"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.639556 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2862f4a4-cd54-45e3-aa8b-322c0b39830d" (UID: "2862f4a4-cd54-45e3-aa8b-322c0b39830d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.640325 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9" (OuterVolumeSpecName: "kube-api-access-f8kw9") pod "2862f4a4-cd54-45e3-aa8b-322c0b39830d" (UID: "2862f4a4-cd54-45e3-aa8b-322c0b39830d"). 
InnerVolumeSpecName "kube-api-access-f8kw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.734135 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8kw9\" (UniqueName: \"kubernetes.io/projected/2862f4a4-cd54-45e3-aa8b-322c0b39830d-kube-api-access-f8kw9\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.734196 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2862f4a4-cd54-45e3-aa8b-322c0b39830d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:05 crc kubenswrapper[4768]: I1203 16:45:05.734210 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2862f4a4-cd54-45e3-aa8b-322c0b39830d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.042099 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 03 16:45:06 crc kubenswrapper[4768]: W1203 16:45:06.044677 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31a17195_ba31_4233_b087_f31d38ff03a7.slice/crio-fa1e770ff753d4fd605b5e55f4e9d5e4c8730c40e900f6bec29b8ff382f1b049 WatchSource:0}: Error finding container fa1e770ff753d4fd605b5e55f4e9d5e4c8730c40e900f6bec29b8ff382f1b049: Status 404 returned error can't find the container with id fa1e770ff753d4fd605b5e55f4e9d5e4c8730c40e900f6bec29b8ff382f1b049 Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.117802 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b","Type":"ContainerStarted","Data":"fd1ae589776e8bf9ac4e0a427e4e42a23154d190ecbbb95322cc4cdd107dd454"} Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.117857 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b","Type":"ContainerStarted","Data":"005e74da8f8c2f167880c1e27511f549309e72c511e457e3e2fbcd4496891479"} Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.117873 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4f7cf11c-aca1-42f7-a9f4-e9c7c941269b","Type":"ContainerStarted","Data":"e1e6894d5f498898d2c78fdf8c55abe4d0becefd98337c4babc9ebb9bd2820dc"} Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.120003 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31a17195-ba31-4233-b087-f31d38ff03a7","Type":"ContainerStarted","Data":"fa1e770ff753d4fd605b5e55f4e9d5e4c8730c40e900f6bec29b8ff382f1b049"} Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.122156 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" event={"ID":"2862f4a4-cd54-45e3-aa8b-322c0b39830d","Type":"ContainerDied","Data":"c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5"} Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.122196 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c50360fde06246ea29f867b6a218fbe8b4e7f99b7c5e0e2eb3434846182368f5" Dec 03 16:45:06 crc kubenswrapper[4768]: I1203 16:45:06.122168 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj" Dec 03 16:45:07 crc kubenswrapper[4768]: I1203 16:45:07.134331 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"31a17195-ba31-4233-b087-f31d38ff03a7","Type":"ContainerStarted","Data":"3677887d173c483e0a0fba668cc08dc85be8cdd9aade4bad4fd27b47fc4d12a9"} Dec 03 16:45:07 crc kubenswrapper[4768]: I1203 16:45:07.159198 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.159181802 podStartE2EDuration="2.159181802s" podCreationTimestamp="2025-12-03 16:45:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:45:07.148519658 +0000 UTC m=+1604.067856081" watchObservedRunningTime="2025-12-03 16:45:07.159181802 +0000 UTC m=+1604.078518225" Dec 03 16:45:07 crc kubenswrapper[4768]: I1203 16:45:07.180418 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.180396156 podStartE2EDuration="3.180396156s" podCreationTimestamp="2025-12-03 16:45:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:45:07.172405841 +0000 UTC m=+1604.091742274" watchObservedRunningTime="2025-12-03 16:45:07.180396156 +0000 UTC m=+1604.099732589" Dec 03 16:45:07 crc kubenswrapper[4768]: I1203 16:45:07.532279 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:45:07 crc kubenswrapper[4768]: E1203 16:45:07.532556 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:45:08 crc kubenswrapper[4768]: I1203 16:45:08.166367 4768 generic.go:334] "Generic (PLEG): container finished" podID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerID="420ba9e320cefd7009cbe6ae37dab2dab281ce81e75fd7307a238b722a39e764" exitCode=0 Dec 03 16:45:08 crc kubenswrapper[4768]: I1203 16:45:08.166412 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerDied","Data":"420ba9e320cefd7009cbe6ae37dab2dab281ce81e75fd7307a238b722a39e764"} Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.649786 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.651403 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.684277 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830154 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830289 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830322 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830429 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830487 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.830536 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc65w\" (UniqueName: \"kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w\") pod \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\" (UID: \"54da6841-6f5d-4f1f-b960-a8b3c86a2b36\") " Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.831390 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs" (OuterVolumeSpecName: "logs") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.837718 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w" (OuterVolumeSpecName: "kube-api-access-hc65w") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "kube-api-access-hc65w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.860661 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data" (OuterVolumeSpecName: "config-data") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.870914 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.893546 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.902180 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "54da6841-6f5d-4f1f-b960-a8b3c86a2b36" (UID: "54da6841-6f5d-4f1f-b960-a8b3c86a2b36"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933536 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933819 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933829 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc65w\" (UniqueName: \"kubernetes.io/projected/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-kube-api-access-hc65w\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933840 4768 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933848 4768 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:09 crc kubenswrapper[4768]: I1203 16:45:09.933856 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54da6841-6f5d-4f1f-b960-a8b3c86a2b36-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.199288 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.199287 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"54da6841-6f5d-4f1f-b960-a8b3c86a2b36","Type":"ContainerDied","Data":"036b4d2d411b1f5b32bc5352d6b129a61ff62aaeb7e16137c7006a14405beed4"} Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.199396 4768 scope.go:117] "RemoveContainer" containerID="420ba9e320cefd7009cbe6ae37dab2dab281ce81e75fd7307a238b722a39e764" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.238394 4768 scope.go:117] "RemoveContainer" containerID="996ef4f57e9805fd3994cca914c8afef7edf769f495a63b0f998abed994f5107" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.261662 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.275334 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.286966 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 03 16:45:10 crc kubenswrapper[4768]: E1203 16:45:10.287478 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-log" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287501 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-log" Dec 03 16:45:10 crc kubenswrapper[4768]: E1203 16:45:10.287530 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2862f4a4-cd54-45e3-aa8b-322c0b39830d" containerName="collect-profiles" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287537 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2862f4a4-cd54-45e3-aa8b-322c0b39830d" containerName="collect-profiles" Dec 03 16:45:10 crc kubenswrapper[4768]: E1203 16:45:10.287560 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-api" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287566 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-api" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287835 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2862f4a4-cd54-45e3-aa8b-322c0b39830d" containerName="collect-profiles" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287869 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-log" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.287898 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" containerName="nova-api-api" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.289317 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.293649 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.293890 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.297161 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.299529 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.345896 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.346027 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-config-data\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.346123 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9lxz\" (UniqueName: \"kubernetes.io/projected/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-kube-api-access-q9lxz\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.346159 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.346192 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.346266 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-logs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447637 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447751 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-config-data\") pod \"nova-api-0\" (UID: 
\"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447821 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9lxz\" (UniqueName: \"kubernetes.io/projected/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-kube-api-access-q9lxz\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447849 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447874 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.447900 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-logs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.448285 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-logs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.452283 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-public-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.452402 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-internal-tls-certs\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.452509 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.452758 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-config-data\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.476364 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9lxz\" (UniqueName: \"kubernetes.io/projected/2b4e5e05-1afb-4f90-93de-6331cd92bfcf-kube-api-access-q9lxz\") pod \"nova-api-0\" (UID: \"2b4e5e05-1afb-4f90-93de-6331cd92bfcf\") " pod="openstack/nova-api-0" Dec 
03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.550544 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 03 16:45:10 crc kubenswrapper[4768]: I1203 16:45:10.610079 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.105782 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 03 16:45:11 crc kubenswrapper[4768]: W1203 16:45:11.108650 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b4e5e05_1afb_4f90_93de_6331cd92bfcf.slice/crio-cfd4565c255e03148b2a7e01cc8aab6df0e025b81b5798018eb0f5612a8cea85 WatchSource:0}: Error finding container cfd4565c255e03148b2a7e01cc8aab6df0e025b81b5798018eb0f5612a8cea85: Status 404 returned error can't find the container with id cfd4565c255e03148b2a7e01cc8aab6df0e025b81b5798018eb0f5612a8cea85 Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.211524 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b4e5e05-1afb-4f90-93de-6331cd92bfcf","Type":"ContainerStarted","Data":"cfd4565c255e03148b2a7e01cc8aab6df0e025b81b5798018eb0f5612a8cea85"} Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.545963 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54da6841-6f5d-4f1f-b960-a8b3c86a2b36" path="/var/lib/kubelet/pods/54da6841-6f5d-4f1f-b960-a8b3c86a2b36/volumes" Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.927629 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.927947 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:11 crc kubenswrapper[4768]: I1203 16:45:11.988909 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:12 crc kubenswrapper[4768]: I1203 16:45:12.299419 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:12 crc kubenswrapper[4768]: I1203 16:45:12.357089 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:45:13 crc kubenswrapper[4768]: I1203 16:45:13.256195 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b4e5e05-1afb-4f90-93de-6331cd92bfcf","Type":"ContainerStarted","Data":"afdb333eb2312f8031dcc9bf721ed57ef88abf36ad9118483835cecb4a782dba"} Dec 03 16:45:13 crc kubenswrapper[4768]: I1203 16:45:13.256537 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"2b4e5e05-1afb-4f90-93de-6331cd92bfcf","Type":"ContainerStarted","Data":"cc4b6038faebd35eec29000f534a4a38a8ea32120c9f43ff49917efdf594e987"} Dec 03 16:45:13 crc kubenswrapper[4768]: I1203 16:45:13.286376 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.286354492 podStartE2EDuration="3.286354492s" podCreationTimestamp="2025-12-03 16:45:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:45:13.274729644 +0000 UTC 
m=+1610.194066097" watchObservedRunningTime="2025-12-03 16:45:13.286354492 +0000 UTC m=+1610.205690915" Dec 03 16:45:14 crc kubenswrapper[4768]: I1203 16:45:14.266796 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m6s2l" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="registry-server" containerID="cri-o://d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8" gracePeriod=2 Dec 03 16:45:14 crc kubenswrapper[4768]: I1203 16:45:14.650108 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:45:14 crc kubenswrapper[4768]: I1203 16:45:14.650369 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 03 16:45:14 crc kubenswrapper[4768]: I1203 16:45:14.887286 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.039557 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities\") pod \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.039692 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfwmf\" (UniqueName: \"kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf\") pod \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.039772 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content\") pod \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\" (UID: \"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9\") " Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.043538 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities" (OuterVolumeSpecName: "utilities") pod "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" (UID: "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.047505 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf" (OuterVolumeSpecName: "kube-api-access-pfwmf") pod "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" (UID: "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9"). InnerVolumeSpecName "kube-api-access-pfwmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.091952 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" (UID: "04dced2d-ba9d-4b3c-8b45-c7c48257f7a9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.142178 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.142212 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfwmf\" (UniqueName: \"kubernetes.io/projected/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-kube-api-access-pfwmf\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.142227 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.278212 4768 generic.go:334] "Generic (PLEG): container finished" podID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerID="d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8" exitCode=0 Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.278268 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerDied","Data":"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8"} Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.278276 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m6s2l" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.278311 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m6s2l" event={"ID":"04dced2d-ba9d-4b3c-8b45-c7c48257f7a9","Type":"ContainerDied","Data":"48ca2c36d0ca28aa170a143d1fbb76d7558a7fa498241587af7df904f877d404"} Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.278359 4768 scope.go:117] "RemoveContainer" containerID="d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.313757 4768 scope.go:117] "RemoveContainer" containerID="a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.320539 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.338182 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m6s2l"] Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.362169 4768 scope.go:117] "RemoveContainer" containerID="308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.414728 4768 scope.go:117] "RemoveContainer" containerID="d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8" Dec 03 16:45:15 crc kubenswrapper[4768]: E1203 16:45:15.415128 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8\": container with ID starting with d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8 not found: ID does not exist" containerID="d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.415169 
4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8"} err="failed to get container status \"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8\": rpc error: code = NotFound desc = could not find container \"d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8\": container with ID starting with d2a09d3053ee31eb63f3170325dd0ab151d480c6203ae525dbb112bf1255b5e8 not found: ID does not exist" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.415198 4768 scope.go:117] "RemoveContainer" containerID="a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81" Dec 03 16:45:15 crc kubenswrapper[4768]: E1203 16:45:15.415629 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81\": container with ID starting with a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81 not found: ID does not exist" containerID="a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.415651 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81"} err="failed to get container status \"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81\": rpc error: code = NotFound desc = could not find container \"a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81\": container with ID starting with a825e71fa7c011d740041745535fb09a6d310a22b0497418559c4a2ebf6a3c81 not found: ID does not exist" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.415664 4768 scope.go:117] "RemoveContainer" containerID="308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9" Dec 03 16:45:15 crc kubenswrapper[4768]: E1203 16:45:15.415960 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9\": container with ID starting with 308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9 not found: ID does not exist" containerID="308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.415976 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9"} err="failed to get container status \"308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9\": rpc error: code = NotFound desc = could not find container \"308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9\": container with ID starting with 308aca41b3be4cfc92bca6a48f3553fc5d1f296f3f11d46652caa91fbe2c31a9 not found: ID does not exist" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.546477 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" path="/var/lib/kubelet/pods/04dced2d-ba9d-4b3c-8b45-c7c48257f7a9/volumes" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.550846 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.599556 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/nova-scheduler-0" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.663819 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4f7cf11c-aca1-42f7-a9f4-e9c7c941269b" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:45:15 crc kubenswrapper[4768]: I1203 16:45:15.663870 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="4f7cf11c-aca1-42f7-a9f4-e9c7c941269b" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:45:16 crc kubenswrapper[4768]: I1203 16:45:16.347744 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 03 16:45:18 crc kubenswrapper[4768]: I1203 16:45:18.200260 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 03 16:45:18 crc kubenswrapper[4768]: I1203 16:45:18.531766 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:45:18 crc kubenswrapper[4768]: E1203 16:45:18.532052 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:45:20 crc kubenswrapper[4768]: I1203 16:45:20.610950 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:45:20 crc kubenswrapper[4768]: I1203 16:45:20.611340 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 03 16:45:21 crc kubenswrapper[4768]: I1203 16:45:21.624752 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2b4e5e05-1afb-4f90-93de-6331cd92bfcf" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.232:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:45:21 crc kubenswrapper[4768]: I1203 16:45:21.624798 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="2b4e5e05-1afb-4f90-93de-6331cd92bfcf" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.232:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 03 16:45:24 crc kubenswrapper[4768]: I1203 16:45:24.656191 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 16:45:24 crc kubenswrapper[4768]: I1203 16:45:24.659002 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 03 16:45:24 crc kubenswrapper[4768]: I1203 16:45:24.665281 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 16:45:24 crc kubenswrapper[4768]: I1203 16:45:24.667242 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 03 
16:45:30 crc kubenswrapper[4768]: I1203 16:45:30.621906 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:45:30 crc kubenswrapper[4768]: I1203 16:45:30.624290 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:45:30 crc kubenswrapper[4768]: I1203 16:45:30.624728 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 03 16:45:30 crc kubenswrapper[4768]: I1203 16:45:30.633349 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:45:31 crc kubenswrapper[4768]: I1203 16:45:31.481560 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 03 16:45:31 crc kubenswrapper[4768]: I1203 16:45:31.492067 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 03 16:45:33 crc kubenswrapper[4768]: I1203 16:45:33.539051 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:45:33 crc kubenswrapper[4768]: E1203 16:45:33.539701 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.512715 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-pxm2p"] Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.546062 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-pxm2p"] Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.646301 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-jpj28"] Dec 03 16:45:41 crc kubenswrapper[4768]: E1203 16:45:41.646861 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="extract-content" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.646884 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="extract-content" Dec 03 16:45:41 crc kubenswrapper[4768]: E1203 16:45:41.646928 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="registry-server" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.646938 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="registry-server" Dec 03 16:45:41 crc kubenswrapper[4768]: E1203 16:45:41.646956 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="extract-utilities" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.646964 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="extract-utilities" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.651826 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="04dced2d-ba9d-4b3c-8b45-c7c48257f7a9" containerName="registry-server" Dec 03 16:45:41 crc kubenswrapper[4768]: 
I1203 16:45:41.652892 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.658655 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.680313 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-jpj28"] Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.766328 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.766396 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcrcv\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.766424 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.766503 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.766539 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.868906 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.868980 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcrcv\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.869019 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " 
pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.869115 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.869160 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.876878 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.878111 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.878894 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.881759 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.896297 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcrcv\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv\") pod \"cloudkitty-db-sync-jpj28\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") " pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:41 crc kubenswrapper[4768]: I1203 16:45:41.991111 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-jpj28" Dec 03 16:45:42 crc kubenswrapper[4768]: I1203 16:45:42.619890 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-jpj28"] Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.302454 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.303135 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-central-agent" containerID="cri-o://0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4" gracePeriod=30 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.303243 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-notification-agent" containerID="cri-o://f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555" gracePeriod=30 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.303245 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="proxy-httpd" containerID="cri-o://353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7" gracePeriod=30 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.303277 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="sg-core" containerID="cri-o://7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a" gracePeriod=30 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.552284 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="449f6efb-981d-445a-b10a-a8d76f9d027d" path="/var/lib/kubelet/pods/449f6efb-981d-445a-b10a-a8d76f9d027d/volumes" Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.646351 4768 generic.go:334] "Generic (PLEG): container finished" podID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerID="353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7" exitCode=0 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.646382 4768 generic.go:334] "Generic (PLEG): container finished" podID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerID="7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a" exitCode=2 Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.646398 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerDied","Data":"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7"} Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.646456 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerDied","Data":"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a"} Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.649473 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jpj28" event={"ID":"36dbacfe-876b-4926-8214-06db2bf33002","Type":"ContainerStarted","Data":"f8d916799b2f9aff32980fc75d429884a1987d7171703ce413e102395cfb3c22"} Dec 03 16:45:43 crc kubenswrapper[4768]: I1203 16:45:43.971535 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/rabbitmq-server-0"] Dec 03 16:45:44 crc kubenswrapper[4768]: I1203 16:45:44.666811 4768 generic.go:334] "Generic (PLEG): container finished" podID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerID="0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4" exitCode=0 Dec 03 16:45:44 crc kubenswrapper[4768]: I1203 16:45:44.667075 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerDied","Data":"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4"} Dec 03 16:45:44 crc kubenswrapper[4768]: I1203 16:45:44.771197 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.349918 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482656 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482748 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482774 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482854 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5tkv\" (UniqueName: \"kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482872 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482907 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.482952 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.483048 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts\") pod \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\" (UID: \"b60fbea6-faba-4e4d-a912-83a79c52b6b5\") " Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.483304 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.483676 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.484175 4768 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.484219 4768 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b60fbea6-faba-4e4d-a912-83a79c52b6b5-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.492782 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv" (OuterVolumeSpecName: "kube-api-access-q5tkv") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "kube-api-access-q5tkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.494887 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts" (OuterVolumeSpecName: "scripts") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.516832 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.532478 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.532985 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.554299 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.587725 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.587769 4768 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.587785 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5tkv\" (UniqueName: \"kubernetes.io/projected/b60fbea6-faba-4e4d-a912-83a79c52b6b5-kube-api-access-q5tkv\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.587798 4768 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.594779 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.631825 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data" (OuterVolumeSpecName: "config-data") pod "b60fbea6-faba-4e4d-a912-83a79c52b6b5" (UID: "b60fbea6-faba-4e4d-a912-83a79c52b6b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.689194 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.689225 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b60fbea6-faba-4e4d-a912-83a79c52b6b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.690028 4768 generic.go:334] "Generic (PLEG): container finished" podID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerID="f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555" exitCode=0 Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.690070 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerDied","Data":"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555"} Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.690101 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b60fbea6-faba-4e4d-a912-83a79c52b6b5","Type":"ContainerDied","Data":"a0b00ecfb27d59266cec4158002634b20eab647b676192bd64fed76edfc01c51"} Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.690117 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.690124 4768 scope.go:117] "RemoveContainer" containerID="353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.729898 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.747947 4768 scope.go:117] "RemoveContainer" containerID="7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.753362 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.765679 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.766093 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-central-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766109 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-central-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.766135 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="proxy-httpd" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766143 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="proxy-httpd" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.766150 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-notification-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766156 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-notification-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.766205 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="sg-core" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766212 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="sg-core" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766399 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-central-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766543 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="sg-core" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766609 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="ceilometer-notification-agent" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.766619 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" containerName="proxy-httpd" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.768681 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.773954 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.774237 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.774420 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.786803 4768 scope.go:117] "RemoveContainer" containerID="f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.795238 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.895844 4768 scope.go:117] "RemoveContainer" containerID="0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.915874 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916243 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916267 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtnwj\" (UniqueName: \"kubernetes.io/projected/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-kube-api-access-vtnwj\") pod \"ceilometer-0\" (UID: 
\"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916295 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-run-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916344 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-config-data\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916406 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916662 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-log-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.916696 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-scripts\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.917677 4768 scope.go:117] "RemoveContainer" containerID="353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.918688 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7\": container with ID starting with 353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7 not found: ID does not exist" containerID="353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.918718 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7"} err="failed to get container status \"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7\": rpc error: code = NotFound desc = could not find container \"353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7\": container with ID starting with 353c3c183b8fa7a43164abfcb1b6b17d7994d64e1334ebb8c50a4ae594f0a9f7 not found: ID does not exist" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.918739 4768 scope.go:117] "RemoveContainer" containerID="7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.919103 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a\": container with ID starting with 7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a not found: ID does not exist" containerID="7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.919122 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a"} err="failed to get container status \"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a\": rpc error: code = NotFound desc = could not find container \"7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a\": container with ID starting with 7444f0cf976ce60870df5d79849166b506bbef29b42bc56d8b41b63d021fec0a not found: ID does not exist" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.919135 4768 scope.go:117] "RemoveContainer" containerID="f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.919523 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555\": container with ID starting with f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555 not found: ID does not exist" containerID="f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.919546 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555"} err="failed to get container status \"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555\": rpc error: code = NotFound desc = could not find container \"f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555\": container with ID starting with f2e920844e81d3309d780a4107e11b50c3c3b4342cf07b2bfa7b6840f3e8a555 not found: ID does not exist" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.919560 4768 scope.go:117] "RemoveContainer" containerID="0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4" Dec 03 16:45:45 crc kubenswrapper[4768]: E1203 16:45:45.920027 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4\": container with ID starting with 0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4 not found: ID does not exist" containerID="0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4" Dec 03 16:45:45 crc kubenswrapper[4768]: I1203 16:45:45.920048 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4"} err="failed to get container status \"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4\": rpc error: code = NotFound desc = could not find container \"0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4\": container with ID starting with 0c9131ef7612cdb7dd9b6138befb4170eb9c21a972455c5b3c1f2cb5beca97b4 not found: ID does not exist" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018068 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtnwj\" (UniqueName: 
\"kubernetes.io/projected/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-kube-api-access-vtnwj\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018174 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-run-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018247 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-config-data\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018348 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-log-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018446 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-scripts\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018522 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.018583 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.019189 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-run-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.019869 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-log-httpd\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.024368 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.025332 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.029599 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-scripts\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.036063 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.037444 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtnwj\" (UniqueName: \"kubernetes.io/projected/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-kube-api-access-vtnwj\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.054897 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4-config-data\") pod \"ceilometer-0\" (UID: \"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4\") " pod="openstack/ceilometer-0" Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.100755 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0"
Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.600012 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 03 16:45:46 crc kubenswrapper[4768]: W1203 16:45:46.608688 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd5a1be35_5f32_4810_b19f_6c6f7e8aa7f4.slice/crio-f9bd8292adf15825b9d4ea901c621fe1fb0cfc8cf6415c7493871cdfca8caa54 WatchSource:0}: Error finding container f9bd8292adf15825b9d4ea901c621fe1fb0cfc8cf6415c7493871cdfca8caa54: Status 404 returned error can't find the container with id f9bd8292adf15825b9d4ea901c621fe1fb0cfc8cf6415c7493871cdfca8caa54
Dec 03 16:45:46 crc kubenswrapper[4768]: I1203 16:45:46.723788 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4","Type":"ContainerStarted","Data":"f9bd8292adf15825b9d4ea901c621fe1fb0cfc8cf6415c7493871cdfca8caa54"}
Dec 03 16:45:47 crc kubenswrapper[4768]: I1203 16:45:47.543234 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b60fbea6-faba-4e4d-a912-83a79c52b6b5" path="/var/lib/kubelet/pods/b60fbea6-faba-4e4d-a912-83a79c52b6b5/volumes"
Dec 03 16:45:48 crc kubenswrapper[4768]: I1203 16:45:48.500219 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq" containerID="cri-o://5b3e8d3049ece4d30ec1e966081fa2d3269e38758d0a529ab387872629abf7ab" gracePeriod=604796
Dec 03 16:45:49 crc kubenswrapper[4768]: I1203 16:45:49.418203 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" containerID="cri-o://7af7ab7bf1c533b1ddfef6547d7db892461c1c6ca7defbd233606f8cab35dff6" gracePeriod=604796
Dec 03 16:45:52 crc kubenswrapper[4768]: E1203 16:45:52.982468 4768 kubelet.go:2526] "Housekeeping took longer than expected" err="housekeeping took too long" expected="1s" actual="1.451s"
Dec 03 16:45:52 crc kubenswrapper[4768]: I1203 16:45:52.987443 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused"
Dec 03 16:45:53 crc kubenswrapper[4768]: I1203 16:45:53.272666 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: connect: connection refused"
Dec 03 16:45:58 crc kubenswrapper[4768]: I1203 16:45:58.030006 4768 generic.go:334] "Generic (PLEG): container finished" podID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerID="7af7ab7bf1c533b1ddfef6547d7db892461c1c6ca7defbd233606f8cab35dff6" exitCode=0
Dec 03 16:45:58 crc kubenswrapper[4768]: I1203 16:45:58.030100 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerDied","Data":"7af7ab7bf1c533b1ddfef6547d7db892461c1c6ca7defbd233606f8cab35dff6"}
Dec 03 16:45:58 crc kubenswrapper[4768]: I1203 16:45:58.032373 4768 generic.go:334] "Generic (PLEG): container finished" podID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerID="5b3e8d3049ece4d30ec1e966081fa2d3269e38758d0a529ab387872629abf7ab" exitCode=0
Dec 03 16:45:58 crc kubenswrapper[4768]: I1203 16:45:58.032418 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerDied","Data":"5b3e8d3049ece4d30ec1e966081fa2d3269e38758d0a529ab387872629abf7ab"}
Dec 03 16:45:59 crc kubenswrapper[4768]: I1203 16:45:59.535486 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"
Dec 03 16:45:59 crc kubenswrapper[4768]: E1203 16:45:59.536043 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:46:07 crc kubenswrapper[4768]: I1203 16:46:07.988026 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: i/o timeout"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.098480 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.156620 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"5061b6be-b64d-4dfd-8431-701066b8cefa","Type":"ContainerDied","Data":"3a5f67b1eb54af382abc3a1a669207f11d0e9e8bb7f6da38e614b011aa8f3849"}
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.156669 4768 scope.go:117] "RemoveContainer" containerID="7af7ab7bf1c533b1ddfef6547d7db892461c1c6ca7defbd233606f8cab35dff6"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.156825 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.168377 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") "
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.169820 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "rabbitmq-plugins".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.169862 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.169902 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.170174 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.170544 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.171415 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173735 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173797 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173858 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173898 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np78t\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173922 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173966 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-server-conf\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.173993 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret\") pod \"5061b6be-b64d-4dfd-8431-701066b8cefa\" (UID: \"5061b6be-b64d-4dfd-8431-701066b8cefa\") " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.175463 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.175492 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.175506 4768 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.180632 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.202077 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t" (OuterVolumeSpecName: "kube-api-access-np78t") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "kube-api-access-np78t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.218993 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info" (OuterVolumeSpecName: "pod-info") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.222935 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.264815 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:08 crc kubenswrapper[4768]: E1203 16:46:08.265382 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.265401 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" Dec 03 16:46:08 crc kubenswrapper[4768]: E1203 16:46:08.265424 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="setup-container" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.265433 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="setup-container" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.265704 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.266962 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.272387 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data" (OuterVolumeSpecName: "config-data") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.272526 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.108:5671: i/o timeout" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.272993 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.274201 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.296124 4768 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/5061b6be-b64d-4dfd-8431-701066b8cefa-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.296154 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.296164 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.296174 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np78t\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-kube-api-access-np78t\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.296184 4768 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/5061b6be-b64d-4dfd-8431-701066b8cefa-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.324530 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284" (OuterVolumeSpecName: "persistence") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "pvc-e4caf4dd-f790-4266-bf56-710a89e69284". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.369652 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.390802 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5061b6be-b64d-4dfd-8431-701066b8cefa-server-conf" (OuterVolumeSpecName: "server-conf") pod "5061b6be-b64d-4dfd-8431-701066b8cefa" (UID: "5061b6be-b64d-4dfd-8431-701066b8cefa"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398479 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398549 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398669 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398738 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398785 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsqzv\" (UniqueName: \"kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.398989 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.399156 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.399427 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") on node \"crc\" " Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.399459 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/5061b6be-b64d-4dfd-8431-701066b8cefa-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.399475 
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.436691 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.437171 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-e4caf4dd-f790-4266-bf56-710a89e69284" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284") on node "crc"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.497199 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.503972 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504029 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsqzv\" (UniqueName: \"kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504066 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504111 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504184 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504205 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504257 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j"
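
Note: "attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice..." means the kubevirt.io.hostpath-provisioner CSI driver does not advertise the STAGE_UNSTAGE_VOLUME node capability, so the kubelet skips the NodeUnstageVolume call (and, a few seconds later in this log, the matching MountDevice/NodeStageVolume step) and only performs the per-pod unmount. A sketch of that capability gate (illustrative types, not the kubelet's CSI client):

    // csi_capability_sketch.go - why UnmountDevice/MountDevice are skipped.
    package main

    import "fmt"

    const stageUnstageVolume = "STAGE_UNSTAGE_VOLUME"

    // supportsStaging scans the capabilities a driver reported from its
    // node service (capability names here are illustrative strings).
    func supportsStaging(nodeCaps []string) bool {
        for _, c := range nodeCaps {
            if c == stageUnstageVolume {
                return true
            }
        }
        return false
    }

    func main() {
        var caps []string // hostpath-style driver: no staging capability
        if !supportsStaging(caps) {
            fmt.Println("skipping NodeStageVolume/NodeUnstageVolume; per-pod (un)mount only")
        }
    }
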
pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.504313 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.505078 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.505508 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.505637 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.506195 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.509378 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.509447 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.510858 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.520773 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsqzv\" (UniqueName: \"kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv\") pod \"dnsmasq-dns-dbb88bf8c-w4c5j\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.523791 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.528286 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.532456 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.532656 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.532898 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.533008 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.533158 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.533255 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.533369 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-54tnc" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.539828 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.664490 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708146 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708275 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708365 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708403 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvmxf\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-kube-api-access-pvmxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708485 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-config-data\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708565 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708633 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708679 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708712 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708760 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.708793 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811105 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811181 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811253 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811283 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvmxf\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-kube-api-access-pvmxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811351 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811393 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811437 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811474 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811506 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811550 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.811578 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.813013 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.813458 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.814335 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.814708 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.815741 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.815818 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/989aecef5bc372eec88750a42cc238896432583c1ff4ea1eb5b415eecee9303d/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.816561 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.816943 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.817127 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.826912 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.827178 4768 
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.838446 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvmxf\" (UniqueName: \"kubernetes.io/projected/e1ebf32c-184a-46da-8f0e-e955fb1fa5e8-kube-api-access-pvmxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:46:08 crc kubenswrapper[4768]: I1203 16:46:08.874921 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-e4caf4dd-f790-4266-bf56-710a89e69284\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-e4caf4dd-f790-4266-bf56-710a89e69284\") pod \"rabbitmq-cell1-server-0\" (UID: \"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.150042 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.536446 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.552495 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5061b6be-b64d-4dfd-8431-701066b8cefa" path="/var/lib/kubelet/pods/5061b6be-b64d-4dfd-8431-701066b8cefa/volumes"
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630663 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630791 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630841 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630883 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630941 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.630977 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") "
"operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.631046 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.631140 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.632511 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.632589 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgtjd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.632642 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd\") pod \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\" (UID: \"0b8fcf68-a566-4dc2-9137-4b1e85eede0f\") " Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.633045 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.633976 4768 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.648482 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.656678 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.658074 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info" (OuterVolumeSpecName: "pod-info") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.661184 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd" (OuterVolumeSpecName: "kube-api-access-qgtjd") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "kube-api-access-qgtjd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.665694 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.695139 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736524 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736559 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgtjd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-kube-api-access-qgtjd\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736571 4768 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-pod-info\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736583 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736612 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:09 crc kubenswrapper[4768]: I1203 16:46:09.736625 4768 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.187678 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"0b8fcf68-a566-4dc2-9137-4b1e85eede0f","Type":"ContainerDied","Data":"0778e7b2e0a6e3eec1d43a83f1021c0760c71db6b1355b61b4334497658d853a"} Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.187729 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.334700 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf" (OuterVolumeSpecName: "server-conf") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.334715 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data" (OuterVolumeSpecName: "config-data") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:10 crc kubenswrapper[4768]: E1203 16:46:10.335966 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 03 16:46:10 crc kubenswrapper[4768]: E1203 16:46:10.336003 4768 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 03 16:46:10 crc kubenswrapper[4768]: E1203 16:46:10.336113 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bcrcv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-jpj28_openstack(36dbacfe-876b-4926-8214-06db2bf33002): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:46:10 crc kubenswrapper[4768]: E1203 16:46:10.337610 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: 
context canceled\"" pod="openstack/cloudkitty-db-sync-jpj28" podUID="36dbacfe-876b-4926-8214-06db2bf33002" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.344789 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d" (OuterVolumeSpecName: "persistence") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.350796 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.350848 4768 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-server-conf\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.350879 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") on node \"crc\" " Dec 03 16:46:10 crc kubenswrapper[4768]: I1203 16:46:10.532190 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:46:10 crc kubenswrapper[4768]: E1203 16:46:10.532579 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:46:11 crc kubenswrapper[4768]: E1203 16:46:11.201428 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested\\\"\"" pod="openstack/cloudkitty-db-sync-jpj28" podUID="36dbacfe-876b-4926-8214-06db2bf33002" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.308526 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0b8fcf68-a566-4dc2-9137-4b1e85eede0f" (UID: "0b8fcf68-a566-4dc2-9137-4b1e85eede0f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.316065 4768 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.316544 4768 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d") on node "crc"
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.371199 4768 reconciler_common.go:293] "Volume detached for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.371250 4768 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b8fcf68-a566-4dc2-9137-4b1e85eede0f-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.481556 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.491889 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.507323 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 03 16:46:11 crc kubenswrapper[4768]: E1203 16:46:11.508031 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq"
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.508049 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq"
Dec 03 16:46:11 crc kubenswrapper[4768]: E1203 16:46:11.508065 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="setup-container"
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.508072 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="setup-container"
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.508276 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" containerName="rabbitmq"
Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.509357 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
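
Note: the RemoveStaleState entries fire because rabbitmq-server-0 is a StatefulSet pod: it keeps its name but returns with a new UID (0b8fcf68-... is replaced by 7f83f074-... below), so the cpu and memory managers purge per-container state keyed by the old UID before admitting the new pod. A map-based illustration (not the kubelet's checkpointed state_mem implementation):

    // stale_state_sketch.go - drop resource-manager state for dead pod UIDs.
    package main

    import "fmt"

    type key struct{ podUID, container string }

    func removeStaleState(state map[key]string, liveUIDs map[string]bool) {
        for k := range state {
            if !liveUIDs[k.podUID] {
                fmt.Printf("Deleted CPUSet assignment podUID=%s container=%s\n", k.podUID, k.container)
                delete(state, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {"0b8fcf68-a566-4dc2-9137-4b1e85eede0f", "rabbitmq"}:        "0-3",
            {"0b8fcf68-a566-4dc2-9137-4b1e85eede0f", "setup-container"}: "0-3",
        }
        removeStaleState(state, map[string]bool{"7f83f074-b1a6-4d38-8a36-a6335766064f": true})
    }
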
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.511247 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.511467 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-x2hkw" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.511250 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.511708 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.511736 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.512102 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.513148 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.527444 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.552913 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b8fcf68-a566-4dc2-9137-4b1e85eede0f" path="/var/lib/kubelet/pods/0b8fcf68-a566-4dc2-9137-4b1e85eede0f/volumes" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.574707 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7f83f074-b1a6-4d38-8a36-a6335766064f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575026 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575102 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdx49\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-kube-api-access-jdx49\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575198 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575290 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7f83f074-b1a6-4d38-8a36-a6335766064f-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575367 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575444 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575512 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575634 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575738 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.575855 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-config-data\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.677686 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.677748 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.677797 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: 
I1203 16:46:11.677860 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.677932 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-config-data\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.677989 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7f83f074-b1a6-4d38-8a36-a6335766064f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678019 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdx49\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-kube-api-access-jdx49\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678045 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678097 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678140 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7f83f074-b1a6-4d38-8a36-a6335766064f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678179 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678277 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.678998 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.679647 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-config-data\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.679734 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.680634 4768 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.680660 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/34f8a2e8feba8e1195031a4c36493954fd2d6f8faf48f707af875a92cd7c2a85/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.681852 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7f83f074-b1a6-4d38-8a36-a6335766064f-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.683397 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.683709 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7f83f074-b1a6-4d38-8a36-a6335766064f-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.683751 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7f83f074-b1a6-4d38-8a36-a6335766064f-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.690234 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.696850 4768 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdx49\" (UniqueName: \"kubernetes.io/projected/7f83f074-b1a6-4d38-8a36-a6335766064f-kube-api-access-jdx49\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.733015 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8008caff-38bf-4b47-aa3f-748f3a97d23d\") pod \"rabbitmq-server-0\" (UID: \"7f83f074-b1a6-4d38-8a36-a6335766064f\") " pod="openstack/rabbitmq-server-0" Dec 03 16:46:11 crc kubenswrapper[4768]: I1203 16:46:11.851683 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 03 16:46:15 crc kubenswrapper[4768]: I1203 16:46:15.573403 4768 scope.go:117] "RemoveContainer" containerID="294cadb0d1f3f85d9f524a4aefc23a1ebc5763fc2780bdaeac0caaf1e2d2aa87" Dec 03 16:46:15 crc kubenswrapper[4768]: I1203 16:46:15.815702 4768 scope.go:117] "RemoveContainer" containerID="5b3e8d3049ece4d30ec1e966081fa2d3269e38758d0a529ab387872629abf7ab" Dec 03 16:46:15 crc kubenswrapper[4768]: I1203 16:46:15.869027 4768 scope.go:117] "RemoveContainer" containerID="a8e17f753020d4487160b9ae587a0c00bfee7bf215ce333c439e75659175f6e7" Dec 03 16:46:16 crc kubenswrapper[4768]: W1203 16:46:16.318694 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1ebf32c_184a_46da_8f0e_e955fb1fa5e8.slice/crio-84c61565f4eba338c97e89e04629111de73dd71cbc9afc652e6932d2757e995c WatchSource:0}: Error finding container 84c61565f4eba338c97e89e04629111de73dd71cbc9afc652e6932d2757e995c: Status 404 returned error can't find the container with id 84c61565f4eba338c97e89e04629111de73dd71cbc9afc652e6932d2757e995c Dec 03 16:46:16 crc kubenswrapper[4768]: I1203 16:46:16.327207 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 03 16:46:16 crc kubenswrapper[4768]: I1203 16:46:16.344123 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 03 16:46:16 crc kubenswrapper[4768]: I1203 16:46:16.373130 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:17 crc kubenswrapper[4768]: I1203 16:46:17.275900 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" event={"ID":"538d388d-64ae-4901-8a6f-7ae1e98ee636","Type":"ContainerStarted","Data":"f29ae131e020848366d609dbe71e85ef82b2f27eaa51b43903f4e06fd872426c"} Dec 03 16:46:17 crc kubenswrapper[4768]: I1203 16:46:17.277273 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8","Type":"ContainerStarted","Data":"84c61565f4eba338c97e89e04629111de73dd71cbc9afc652e6932d2757e995c"} Dec 03 16:46:17 crc kubenswrapper[4768]: I1203 16:46:17.279054 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7f83f074-b1a6-4d38-8a36-a6335766064f","Type":"ContainerStarted","Data":"652e33ba098a0de4699343bc1738ceafb5672770428df9fc95ddc83ce8398991"} Dec 03 16:46:18 crc kubenswrapper[4768]: E1203 16:46:18.032749 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Dec 03 16:46:18 crc kubenswrapper[4768]: E1203 16:46:18.033061 4768 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested" Dec 03 16:46:18 crc kubenswrapper[4768]: E1203 16:46:18.033197 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n74h5f7h7dh5c4h565h54dhf6h647h69h5c7h56h595h5d7hbh5dh56dh65bh545h575h594h5c9hd8h667h696hd8h666h7h6fh675h59ch59fh598q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vtnwj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 16:46:18 crc kubenswrapper[4768]: I1203 16:46:18.299942 4768 generic.go:334] "Generic (PLEG): container finished" podID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerID="b5fdb644727c65a93b3f20e93843cf1e280912aa14e06028bad6c5f27d359cba" exitCode=0 Dec 03 16:46:18 crc kubenswrapper[4768]: I1203 16:46:18.300010 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" 
event={"ID":"538d388d-64ae-4901-8a6f-7ae1e98ee636","Type":"ContainerDied","Data":"b5fdb644727c65a93b3f20e93843cf1e280912aa14e06028bad6c5f27d359cba"} Dec 03 16:46:19 crc kubenswrapper[4768]: I1203 16:46:19.311713 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4","Type":"ContainerStarted","Data":"dfb89925241b66fa084b2d5becdf0ae0fef8caf952f4934229d8753b4cd8014b"} Dec 03 16:46:19 crc kubenswrapper[4768]: I1203 16:46:19.314050 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" event={"ID":"538d388d-64ae-4901-8a6f-7ae1e98ee636","Type":"ContainerStarted","Data":"3afb9d2072aa2446a8408be558f81091ec274fd2ac6737580caeacc8342b0639"} Dec 03 16:46:19 crc kubenswrapper[4768]: I1203 16:46:19.314186 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:19 crc kubenswrapper[4768]: I1203 16:46:19.338760 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" podStartSLOduration=11.338739941 podStartE2EDuration="11.338739941s" podCreationTimestamp="2025-12-03 16:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:19.333141705 +0000 UTC m=+1676.252478138" watchObservedRunningTime="2025-12-03 16:46:19.338739941 +0000 UTC m=+1676.258076384" Dec 03 16:46:20 crc kubenswrapper[4768]: I1203 16:46:20.332483 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7f83f074-b1a6-4d38-8a36-a6335766064f","Type":"ContainerStarted","Data":"506332fdd300d201bc3d2728701d456a81b52d888b869b2d15c8a9b2e1d574be"} Dec 03 16:46:20 crc kubenswrapper[4768]: I1203 16:46:20.334326 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8","Type":"ContainerStarted","Data":"bdc5add6641b503e8bc484ae0191f580785df109e72287006e3e9ee95e24769c"} Dec 03 16:46:21 crc kubenswrapper[4768]: I1203 16:46:21.347912 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4","Type":"ContainerStarted","Data":"66cc32dcac119eefbbf968c5d5af6fa8c267eebce0ca01165deb288c4a696581"} Dec 03 16:46:21 crc kubenswrapper[4768]: I1203 16:46:21.532955 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:46:21 crc kubenswrapper[4768]: E1203 16:46:21.533369 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:46:22 crc kubenswrapper[4768]: E1203 16:46:22.331275 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4" Dec 03 16:46:22 crc kubenswrapper[4768]: I1203 16:46:22.358614 4768 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4","Type":"ContainerStarted","Data":"1d708d16b9492c5126c549a77ed8dd2bb0be1613aaa52091b084cc6c0be1e7ea"} Dec 03 16:46:22 crc kubenswrapper[4768]: I1203 16:46:22.358821 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 03 16:46:22 crc kubenswrapper[4768]: E1203 16:46:22.360410 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4" Dec 03 16:46:23 crc kubenswrapper[4768]: E1203 16:46:23.370515 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-ceilometer-central:current-tested\\\"\"" pod="openstack/ceilometer-0" podUID="d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4" Dec 03 16:46:26 crc kubenswrapper[4768]: I1203 16:46:26.536030 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:46:26 crc kubenswrapper[4768]: I1203 16:46:26.950152 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 03 16:46:27 crc kubenswrapper[4768]: I1203 16:46:27.414061 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jpj28" event={"ID":"36dbacfe-876b-4926-8214-06db2bf33002","Type":"ContainerStarted","Data":"54312468cb34f855971cf5f33681cfd2853f75b1b247529929dcf96bda996bba"} Dec 03 16:46:27 crc kubenswrapper[4768]: I1203 16:46:27.454838 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-jpj28" podStartSLOduration=2.1413036229999998 podStartE2EDuration="46.454819742s" podCreationTimestamp="2025-12-03 16:45:41 +0000 UTC" firstStartedPulling="2025-12-03 16:45:42.633831061 +0000 UTC m=+1639.553167484" lastFinishedPulling="2025-12-03 16:46:26.94734718 +0000 UTC m=+1683.866683603" observedRunningTime="2025-12-03 16:46:27.432824747 +0000 UTC m=+1684.352161180" watchObservedRunningTime="2025-12-03 16:46:27.454819742 +0000 UTC m=+1684.374156165" Dec 03 16:46:28 crc kubenswrapper[4768]: I1203 16:46:28.666652 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:28 crc kubenswrapper[4768]: I1203 16:46:28.747726 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"] Dec 03 16:46:28 crc kubenswrapper[4768]: I1203 16:46:28.747949 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="dnsmasq-dns" containerID="cri-o://ab85698c277b7b82ffa6d8351724a304c6f550993a10d7539404a7e4aeea57af" gracePeriod=10 Dec 03 16:46:29 crc kubenswrapper[4768]: I1203 16:46:29.985668 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-rckkv"] Dec 03 16:46:29 crc kubenswrapper[4768]: I1203 16:46:29.997862 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.053776 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-rckkv"] Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098301 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-config\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098426 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098471 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-svc\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098505 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098552 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5m9q\" (UniqueName: \"kubernetes.io/projected/bead7c34-6203-449b-b855-48ef80b18409-kube-api-access-d5m9q\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098611 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.098812 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.148917 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.224:5353: connect: connection refused" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200554 4768 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200708 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-config\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200795 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200828 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-svc\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200856 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200902 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5m9q\" (UniqueName: \"kubernetes.io/projected/bead7c34-6203-449b-b855-48ef80b18409-kube-api-access-d5m9q\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.200939 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.201808 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-config\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.201863 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-swift-storage-0\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.201986 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-sb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.201998 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-dns-svc\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.202149 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-ovsdbserver-nb\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.202453 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bead7c34-6203-449b-b855-48ef80b18409-openstack-edpm-ipam\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.223705 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5m9q\" (UniqueName: \"kubernetes.io/projected/bead7c34-6203-449b-b855-48ef80b18409-kube-api-access-d5m9q\") pod \"dnsmasq-dns-85f64749dc-rckkv\" (UID: \"bead7c34-6203-449b-b855-48ef80b18409\") " pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.338658 4768 util.go:30] "No sandbox for pod can be found. 
Dec 03 16:46:30 crc kubenswrapper[4768]: W1203 16:46:30.906212 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbead7c34_6203_449b_b855_48ef80b18409.slice/crio-1dc267e02f9f4cc67b86e50e8b6ead905246ffff3d308370746c4c0049b9532e WatchSource:0}: Error finding container 1dc267e02f9f4cc67b86e50e8b6ead905246ffff3d308370746c4c0049b9532e: Status 404 returned error can't find the container with id 1dc267e02f9f4cc67b86e50e8b6ead905246ffff3d308370746c4c0049b9532e
Dec 03 16:46:30 crc kubenswrapper[4768]: I1203 16:46:30.914690 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85f64749dc-rckkv"]
Dec 03 16:46:31 crc kubenswrapper[4768]: I1203 16:46:31.454089 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" event={"ID":"bead7c34-6203-449b-b855-48ef80b18409","Type":"ContainerStarted","Data":"1dc267e02f9f4cc67b86e50e8b6ead905246ffff3d308370746c4c0049b9532e"}
Dec 03 16:46:34 crc kubenswrapper[4768]: I1203 16:46:34.584062 4768 generic.go:334] "Generic (PLEG): container finished" podID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerID="ab85698c277b7b82ffa6d8351724a304c6f550993a10d7539404a7e4aeea57af" exitCode=0
Dec 03 16:46:34 crc kubenswrapper[4768]: I1203 16:46:34.584156 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerDied","Data":"ab85698c277b7b82ffa6d8351724a304c6f550993a10d7539404a7e4aeea57af"}
Dec 03 16:46:34 crc kubenswrapper[4768]: I1203 16:46:34.987773 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002206 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002252 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002341 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv9d4\" (UniqueName: \"kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002365 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002400 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.002558 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb\") pod \"5efd08b0-6167-405c-9da0-121a9e3c3b55\" (UID: \"5efd08b0-6167-405c-9da0-121a9e3c3b55\") "
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.030737 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4" (OuterVolumeSpecName: "kube-api-access-wv9d4") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "kube-api-access-wv9d4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.063821 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.079248 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.090159 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.100097 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config" (OuterVolumeSpecName: "config") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.104215 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv9d4\" (UniqueName: \"kubernetes.io/projected/5efd08b0-6167-405c-9da0-121a9e3c3b55-kube-api-access-wv9d4\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.104242 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.104251 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.104260 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.104268 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-config\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.106333 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5efd08b0-6167-405c-9da0-121a9e3c3b55" (UID: "5efd08b0-6167-405c-9da0-121a9e3c3b55"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.206811 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5efd08b0-6167-405c-9da0-121a9e3c3b55-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.598951 4768 generic.go:334] "Generic (PLEG): container finished" podID="36dbacfe-876b-4926-8214-06db2bf33002" containerID="54312468cb34f855971cf5f33681cfd2853f75b1b247529929dcf96bda996bba" exitCode=0
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.599040 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jpj28" event={"ID":"36dbacfe-876b-4926-8214-06db2bf33002","Type":"ContainerDied","Data":"54312468cb34f855971cf5f33681cfd2853f75b1b247529929dcf96bda996bba"}
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.601263 4768 generic.go:334] "Generic (PLEG): container finished" podID="bead7c34-6203-449b-b855-48ef80b18409" containerID="9eeaac9c4474a51332c6acfd2da4c2f39475d79d8b0f6ff0e6201e31da315d1d" exitCode=0
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.601306 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" event={"ID":"bead7c34-6203-449b-b855-48ef80b18409","Type":"ContainerDied","Data":"9eeaac9c4474a51332c6acfd2da4c2f39475d79d8b0f6ff0e6201e31da315d1d"}
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.604715 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv" event={"ID":"5efd08b0-6167-405c-9da0-121a9e3c3b55","Type":"ContainerDied","Data":"bd1f041f6a0588651c4c3e4e1fb0a856245f5828149213d8782996aa0d4f048f"}
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.604772 4768 scope.go:117] "RemoveContainer" containerID="ab85698c277b7b82ffa6d8351724a304c6f550993a10d7539404a7e4aeea57af"
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.604866 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5fd9b586ff-s9mgv"
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.641135 4768 scope.go:117] "RemoveContainer" containerID="147ec30adeefebdacc3803387aaac37aa5e4ca8ad0d7b60f92db5ef60a5a9c13"
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.651034 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"]
Dec 03 16:46:35 crc kubenswrapper[4768]: I1203 16:46:35.660777 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5fd9b586ff-s9mgv"]
Dec 03 16:46:36 crc kubenswrapper[4768]: I1203 16:46:36.532073 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"
Dec 03 16:46:36 crc kubenswrapper[4768]: E1203 16:46:36.532797 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:46:36 crc kubenswrapper[4768]: I1203 16:46:36.551440 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 03 16:46:36 crc kubenswrapper[4768]: I1203 16:46:36.621413 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" event={"ID":"bead7c34-6203-449b-b855-48ef80b18409","Type":"ContainerStarted","Data":"049f2da9c6723a3fc46510fca05c4eb96d50b5a7f7b6fdf4285cbeffb4bdab05"}
Dec 03 16:46:36 crc kubenswrapper[4768]: I1203 16:46:36.623036 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85f64749dc-rckkv"
Dec 03 16:46:36 crc kubenswrapper[4768]: I1203 16:46:36.680987 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" podStartSLOduration=7.6809645 podStartE2EDuration="7.6809645s" podCreationTimestamp="2025-12-03 16:46:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:36.660028964 +0000 UTC m=+1693.579365437" watchObservedRunningTime="2025-12-03 16:46:36.6809645 +0000 UTC m=+1693.600300923"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.068099 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-jpj28"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.104744 4768 scope.go:117] "RemoveContainer" containerID="fb558adef74ff27918fd9121bf631dc251ee93cd72bde689b18eaa3637b793d0"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.155018 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data\") pod \"36dbacfe-876b-4926-8214-06db2bf33002\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") "
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.155099 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle\") pod \"36dbacfe-876b-4926-8214-06db2bf33002\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") "
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.155182 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts\") pod \"36dbacfe-876b-4926-8214-06db2bf33002\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") "
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.155230 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs\") pod \"36dbacfe-876b-4926-8214-06db2bf33002\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") "
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.155324 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcrcv\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv\") pod \"36dbacfe-876b-4926-8214-06db2bf33002\" (UID: \"36dbacfe-876b-4926-8214-06db2bf33002\") "
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.161254 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv" (OuterVolumeSpecName: "kube-api-access-bcrcv") pod "36dbacfe-876b-4926-8214-06db2bf33002" (UID: "36dbacfe-876b-4926-8214-06db2bf33002"). InnerVolumeSpecName "kube-api-access-bcrcv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.162153 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts" (OuterVolumeSpecName: "scripts") pod "36dbacfe-876b-4926-8214-06db2bf33002" (UID: "36dbacfe-876b-4926-8214-06db2bf33002"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.166802 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs" (OuterVolumeSpecName: "certs") pod "36dbacfe-876b-4926-8214-06db2bf33002" (UID: "36dbacfe-876b-4926-8214-06db2bf33002"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.198871 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data" (OuterVolumeSpecName: "config-data") pod "36dbacfe-876b-4926-8214-06db2bf33002" (UID: "36dbacfe-876b-4926-8214-06db2bf33002"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.202787 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36dbacfe-876b-4926-8214-06db2bf33002" (UID: "36dbacfe-876b-4926-8214-06db2bf33002"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.257989 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcrcv\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-kube-api-access-bcrcv\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.258038 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.258053 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.258064 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36dbacfe-876b-4926-8214-06db2bf33002-scripts\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.258079 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/36dbacfe-876b-4926-8214-06db2bf33002-certs\") on node \"crc\" DevicePath \"\""
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.542698 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" path="/var/lib/kubelet/pods/5efd08b0-6167-405c-9da0-121a9e3c3b55/volumes"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.637943 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-jpj28" event={"ID":"36dbacfe-876b-4926-8214-06db2bf33002","Type":"ContainerDied","Data":"f8d916799b2f9aff32980fc75d429884a1987d7171703ce413e102395cfb3c22"}
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.638234 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8d916799b2f9aff32980fc75d429884a1987d7171703ce413e102395cfb3c22"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.637974 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-jpj28"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.706536 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-wx7wk"]
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.717768 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-wx7wk"]
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.801342 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-c6rx6"]
Dec 03 16:46:37 crc kubenswrapper[4768]: E1203 16:46:37.801754 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="dnsmasq-dns"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.801771 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="dnsmasq-dns"
Dec 03 16:46:37 crc kubenswrapper[4768]: E1203 16:46:37.801811 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36dbacfe-876b-4926-8214-06db2bf33002" containerName="cloudkitty-db-sync"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.801818 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="36dbacfe-876b-4926-8214-06db2bf33002" containerName="cloudkitty-db-sync"
Dec 03 16:46:37 crc kubenswrapper[4768]: E1203 16:46:37.801831 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="init"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.801838 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="init"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.802019 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="5efd08b0-6167-405c-9da0-121a9e3c3b55" containerName="dnsmasq-dns"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.802050 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="36dbacfe-876b-4926-8214-06db2bf33002" containerName="cloudkitty-db-sync"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.803141 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.805654 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.818824 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-c6rx6"]
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.871864 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5g5v5\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.871932 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.871980 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.872006 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.872042 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.972891 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5g5v5\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.972993 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.973068 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.973102 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.973150 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.978189 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.978438 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.978816 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.978996 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:37 crc kubenswrapper[4768]: I1203 16:46:37.996007 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5g5v5\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5\") pod \"cloudkitty-storageinit-c6rx6\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " pod="openstack/cloudkitty-storageinit-c6rx6"
Dec 03 16:46:38 crc kubenswrapper[4768]: I1203 16:46:38.148187 4768 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cloudkitty-storageinit-c6rx6" Dec 03 16:46:38 crc kubenswrapper[4768]: I1203 16:46:38.643458 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-c6rx6"] Dec 03 16:46:38 crc kubenswrapper[4768]: W1203 16:46:38.645672 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode852a88e_fb99_477e_9b55_fa57a654c1f2.slice/crio-02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa WatchSource:0}: Error finding container 02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa: Status 404 returned error can't find the container with id 02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa Dec 03 16:46:38 crc kubenswrapper[4768]: I1203 16:46:38.656510 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4","Type":"ContainerStarted","Data":"0f4c1b36ce634c850adf128865e54b3b397aca8abb488dacf1329d3ede2417ab"} Dec 03 16:46:38 crc kubenswrapper[4768]: I1203 16:46:38.677122 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.257950498 podStartE2EDuration="53.677103974s" podCreationTimestamp="2025-12-03 16:45:45 +0000 UTC" firstStartedPulling="2025-12-03 16:45:46.610978154 +0000 UTC m=+1643.530314577" lastFinishedPulling="2025-12-03 16:46:38.03013162 +0000 UTC m=+1694.949468053" observedRunningTime="2025-12-03 16:46:38.674083135 +0000 UTC m=+1695.593419558" watchObservedRunningTime="2025-12-03 16:46:38.677103974 +0000 UTC m=+1695.596440397" Dec 03 16:46:39 crc kubenswrapper[4768]: I1203 16:46:39.544726 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45d9dcbe-2050-4bb6-b5f2-8836006e5085" path="/var/lib/kubelet/pods/45d9dcbe-2050-4bb6-b5f2-8836006e5085/volumes" Dec 03 16:46:39 crc kubenswrapper[4768]: I1203 16:46:39.670527 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c6rx6" event={"ID":"e852a88e-fb99-477e-9b55-fa57a654c1f2","Type":"ContainerStarted","Data":"6c2db4874b3370a6b85deb15833e5a3c12b03f09a95a58d01b31ac6b3bfaaa7f"} Dec 03 16:46:39 crc kubenswrapper[4768]: I1203 16:46:39.670622 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c6rx6" event={"ID":"e852a88e-fb99-477e-9b55-fa57a654c1f2","Type":"ContainerStarted","Data":"02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa"} Dec 03 16:46:39 crc kubenswrapper[4768]: I1203 16:46:39.695204 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-c6rx6" podStartSLOduration=2.695183999 podStartE2EDuration="2.695183999s" podCreationTimestamp="2025-12-03 16:46:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:39.685203359 +0000 UTC m=+1696.604539782" watchObservedRunningTime="2025-12-03 16:46:39.695183999 +0000 UTC m=+1696.614520412" Dec 03 16:46:40 crc kubenswrapper[4768]: I1203 16:46:40.340369 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85f64749dc-rckkv" Dec 03 16:46:40 crc kubenswrapper[4768]: I1203 16:46:40.420977 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:40 crc kubenswrapper[4768]: I1203 16:46:40.421287 4768 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="dnsmasq-dns" containerID="cri-o://3afb9d2072aa2446a8408be558f81091ec274fd2ac6737580caeacc8342b0639" gracePeriod=10 Dec 03 16:46:40 crc kubenswrapper[4768]: I1203 16:46:40.682405 4768 generic.go:334] "Generic (PLEG): container finished" podID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerID="3afb9d2072aa2446a8408be558f81091ec274fd2ac6737580caeacc8342b0639" exitCode=0 Dec 03 16:46:40 crc kubenswrapper[4768]: I1203 16:46:40.682405 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" event={"ID":"538d388d-64ae-4901-8a6f-7ae1e98ee636","Type":"ContainerDied","Data":"3afb9d2072aa2446a8408be558f81091ec274fd2ac6737580caeacc8342b0639"} Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.531472 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.653798 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.653865 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.653920 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.653959 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.653982 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.654125 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsqzv\" (UniqueName: \"kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.654227 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0\") pod \"538d388d-64ae-4901-8a6f-7ae1e98ee636\" (UID: \"538d388d-64ae-4901-8a6f-7ae1e98ee636\") " Dec 03 16:46:41 crc 
kubenswrapper[4768]: I1203 16:46:41.661013 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv" (OuterVolumeSpecName: "kube-api-access-zsqzv") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "kube-api-access-zsqzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.714390 4768 generic.go:334] "Generic (PLEG): container finished" podID="e852a88e-fb99-477e-9b55-fa57a654c1f2" containerID="6c2db4874b3370a6b85deb15833e5a3c12b03f09a95a58d01b31ac6b3bfaaa7f" exitCode=0 Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.714458 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c6rx6" event={"ID":"e852a88e-fb99-477e-9b55-fa57a654c1f2","Type":"ContainerDied","Data":"6c2db4874b3370a6b85deb15833e5a3c12b03f09a95a58d01b31ac6b3bfaaa7f"} Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.714907 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.719806 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" event={"ID":"538d388d-64ae-4901-8a6f-7ae1e98ee636","Type":"ContainerDied","Data":"f29ae131e020848366d609dbe71e85ef82b2f27eaa51b43903f4e06fd872426c"} Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.719860 4768 scope.go:117] "RemoveContainer" containerID="3afb9d2072aa2446a8408be558f81091ec274fd2ac6737580caeacc8342b0639" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.720042 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-dbb88bf8c-w4c5j" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.725376 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config" (OuterVolumeSpecName: "config") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.740999 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.745953 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.753392 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757814 4768 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757847 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757861 4768 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757873 4768 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757884 4768 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-config\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.757897 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsqzv\" (UniqueName: \"kubernetes.io/projected/538d388d-64ae-4901-8a6f-7ae1e98ee636-kube-api-access-zsqzv\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.767539 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "538d388d-64ae-4901-8a6f-7ae1e98ee636" (UID: "538d388d-64ae-4901-8a6f-7ae1e98ee636"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.830730 4768 scope.go:117] "RemoveContainer" containerID="b5fdb644727c65a93b3f20e93843cf1e280912aa14e06028bad6c5f27d359cba" Dec 03 16:46:41 crc kubenswrapper[4768]: I1203 16:46:41.859653 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/538d388d-64ae-4901-8a6f-7ae1e98ee636-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:42 crc kubenswrapper[4768]: I1203 16:46:42.056172 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:42 crc kubenswrapper[4768]: I1203 16:46:42.071483 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-dbb88bf8c-w4c5j"] Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.212221 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c6rx6" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.398852 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts\") pod \"e852a88e-fb99-477e-9b55-fa57a654c1f2\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.398933 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle\") pod \"e852a88e-fb99-477e-9b55-fa57a654c1f2\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.398959 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5g5v5\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5\") pod \"e852a88e-fb99-477e-9b55-fa57a654c1f2\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.399060 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs\") pod \"e852a88e-fb99-477e-9b55-fa57a654c1f2\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.399087 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data\") pod \"e852a88e-fb99-477e-9b55-fa57a654c1f2\" (UID: \"e852a88e-fb99-477e-9b55-fa57a654c1f2\") " Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.406619 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts" (OuterVolumeSpecName: "scripts") pod "e852a88e-fb99-477e-9b55-fa57a654c1f2" (UID: "e852a88e-fb99-477e-9b55-fa57a654c1f2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.407024 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs" (OuterVolumeSpecName: "certs") pod "e852a88e-fb99-477e-9b55-fa57a654c1f2" (UID: "e852a88e-fb99-477e-9b55-fa57a654c1f2"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.408800 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5" (OuterVolumeSpecName: "kube-api-access-5g5v5") pod "e852a88e-fb99-477e-9b55-fa57a654c1f2" (UID: "e852a88e-fb99-477e-9b55-fa57a654c1f2"). InnerVolumeSpecName "kube-api-access-5g5v5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.429250 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data" (OuterVolumeSpecName: "config-data") pod "e852a88e-fb99-477e-9b55-fa57a654c1f2" (UID: "e852a88e-fb99-477e-9b55-fa57a654c1f2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.450139 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e852a88e-fb99-477e-9b55-fa57a654c1f2" (UID: "e852a88e-fb99-477e-9b55-fa57a654c1f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.503054 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.503084 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5g5v5\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-kube-api-access-5g5v5\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.503094 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e852a88e-fb99-477e-9b55-fa57a654c1f2-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.503104 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.503112 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e852a88e-fb99-477e-9b55-fa57a654c1f2-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.556806 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" path="/var/lib/kubelet/pods/538d388d-64ae-4901-8a6f-7ae1e98ee636/volumes" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.749874 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-c6rx6" event={"ID":"e852a88e-fb99-477e-9b55-fa57a654c1f2","Type":"ContainerDied","Data":"02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa"} Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.749915 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02301d5335d42acdd8dcc75369b7df9793c70b5626b2f90cbba921c3b20705fa" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.749937 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-c6rx6" Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.842166 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.842379 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="4f24e895-8599-4764-9cbd-30a74bad2423" containerName="cloudkitty-proc" containerID="cri-o://9df5d26753c5b7d836b831a6b72289787b31904d8f479af31de63060e92c50c3" gracePeriod=30 Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.861462 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.861763 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api-log" containerID="cri-o://17f8b4c0fe15656d345f5167fba5978f47abd64cb728bae5df7a3d6fb0064fd0" gracePeriod=30 Dec 03 16:46:43 crc kubenswrapper[4768]: I1203 16:46:43.861866 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" containerID="cri-o://623e299e4155734d13d177d61ad0fdfd877de0186ce3f807b118ce3db9b38879" gracePeriod=30 Dec 03 16:46:44 crc kubenswrapper[4768]: I1203 16:46:44.763634 4768 generic.go:334] "Generic (PLEG): container finished" podID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerID="17f8b4c0fe15656d345f5167fba5978f47abd64cb728bae5df7a3d6fb0064fd0" exitCode=143 Dec 03 16:46:44 crc kubenswrapper[4768]: I1203 16:46:44.763727 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerDied","Data":"17f8b4c0fe15656d345f5167fba5978f47abd64cb728bae5df7a3d6fb0064fd0"} Dec 03 16:46:46 crc kubenswrapper[4768]: I1203 16:46:46.791803 4768 generic.go:334] "Generic (PLEG): container finished" podID="4f24e895-8599-4764-9cbd-30a74bad2423" containerID="9df5d26753c5b7d836b831a6b72289787b31904d8f479af31de63060e92c50c3" exitCode=0 Dec 03 16:46:46 crc kubenswrapper[4768]: I1203 16:46:46.791911 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4f24e895-8599-4764-9cbd-30a74bad2423","Type":"ContainerDied","Data":"9df5d26753c5b7d836b831a6b72289787b31904d8f479af31de63060e92c50c3"} Dec 03 16:46:46 crc kubenswrapper[4768]: I1203 16:46:46.795111 4768 generic.go:334] "Generic (PLEG): container finished" podID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerID="623e299e4155734d13d177d61ad0fdfd877de0186ce3f807b118ce3db9b38879" exitCode=0 Dec 03 16:46:46 crc kubenswrapper[4768]: I1203 16:46:46.795149 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerDied","Data":"623e299e4155734d13d177d61ad0fdfd877de0186ce3f807b118ce3db9b38879"} Dec 03 16:46:47 crc kubenswrapper[4768]: I1203 16:46:47.093314 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-api-0" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" probeResult="failure" output="Get \"https://10.217.0.188:8889/healthcheck\": dial tcp 10.217.0.188:8889: connect: connection refused" Dec 03 16:46:47 crc kubenswrapper[4768]: I1203 16:46:47.934902 4768 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102335 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102405 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102440 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102524 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102622 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102651 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw9nr\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102685 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.102986 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.103130 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom\") pod \"3d103c3d-bc24-441d-a619-c02dd3be204f\" (UID: \"3d103c3d-bc24-441d-a619-c02dd3be204f\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.103710 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs" (OuterVolumeSpecName: "logs") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: 
"3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.104108 4768 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d103c3d-bc24-441d-a619-c02dd3be204f-logs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.110129 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr" (OuterVolumeSpecName: "kube-api-access-zw9nr") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "kube-api-access-zw9nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.112743 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs" (OuterVolumeSpecName: "certs") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.112898 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.113762 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts" (OuterVolumeSpecName: "scripts") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.162978 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data" (OuterVolumeSpecName: "config-data") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.166160 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.183335 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.201052 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3d103c3d-bc24-441d-a619-c02dd3be204f" (UID: "3d103c3d-bc24-441d-a619-c02dd3be204f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.206946 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.206983 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.206997 4768 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.207006 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.207015 4768 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.207024 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.207034 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw9nr\" (UniqueName: \"kubernetes.io/projected/3d103c3d-bc24-441d-a619-c02dd3be204f-kube-api-access-zw9nr\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.207234 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d103c3d-bc24-441d-a619-c02dd3be204f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.413876 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.512845 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.512938 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.513071 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f885p\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.513128 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.513230 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.513322 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts\") pod \"4f24e895-8599-4764-9cbd-30a74bad2423\" (UID: \"4f24e895-8599-4764-9cbd-30a74bad2423\") " Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.517088 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs" (OuterVolumeSpecName: "certs") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.517134 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.517640 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p" (OuterVolumeSpecName: "kube-api-access-f885p") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "kube-api-access-f885p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.519745 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts" (OuterVolumeSpecName: "scripts") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.549104 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data" (OuterVolumeSpecName: "config-data") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.550975 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f24e895-8599-4764-9cbd-30a74bad2423" (UID: "4f24e895-8599-4764-9cbd-30a74bad2423"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615840 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615876 4768 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-scripts\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615884 4768 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-certs\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615892 4768 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615902 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f885p\" (UniqueName: \"kubernetes.io/projected/4f24e895-8599-4764-9cbd-30a74bad2423-kube-api-access-f885p\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.615910 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f24e895-8599-4764-9cbd-30a74bad2423-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.818133 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"3d103c3d-bc24-441d-a619-c02dd3be204f","Type":"ContainerDied","Data":"ae0552e041d8c5186c19698b3ac04b09e442c012fc3bba3bba74885ffbb1b2a0"} Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.818431 4768 scope.go:117] "RemoveContainer" containerID="623e299e4155734d13d177d61ad0fdfd877de0186ce3f807b118ce3db9b38879" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.818168 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.819692 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"4f24e895-8599-4764-9cbd-30a74bad2423","Type":"ContainerDied","Data":"d25433323d48ece6473952cd4ab3735dbcd715b78df375ef24bba71c83d5c952"} Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.819746 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.879872 4768 scope.go:117] "RemoveContainer" containerID="17f8b4c0fe15656d345f5167fba5978f47abd64cb728bae5df7a3d6fb0064fd0" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.960854 4768 scope.go:117] "RemoveContainer" containerID="9df5d26753c5b7d836b831a6b72289787b31904d8f479af31de63060e92c50c3" Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.981401 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:48 crc kubenswrapper[4768]: I1203 16:46:48.989733 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.008092 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.020067 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.034867 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035321 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f24e895-8599-4764-9cbd-30a74bad2423" containerName="cloudkitty-proc" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035332 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f24e895-8599-4764-9cbd-30a74bad2423" containerName="cloudkitty-proc" Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035354 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035360 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035376 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e852a88e-fb99-477e-9b55-fa57a654c1f2" containerName="cloudkitty-storageinit" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035382 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e852a88e-fb99-477e-9b55-fa57a654c1f2" containerName="cloudkitty-storageinit" Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035394 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api-log" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035399 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api-log" Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035415 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="init" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035420 4768 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="init" Dec 03 16:46:49 crc kubenswrapper[4768]: E1203 16:46:49.035430 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="dnsmasq-dns" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035436 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="dnsmasq-dns" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035657 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="538d388d-64ae-4901-8a6f-7ae1e98ee636" containerName="dnsmasq-dns" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035669 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f24e895-8599-4764-9cbd-30a74bad2423" containerName="cloudkitty-proc" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035685 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e852a88e-fb99-477e-9b55-fa57a654c1f2" containerName="cloudkitty-storageinit" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035694 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api-log" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.035703 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" containerName="cloudkitty-api" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.036381 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.047953 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.050452 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.050630 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.050771 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.050826 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.051050 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-dr7j4" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.075587 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.077370 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.080740 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.081073 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.081202 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.093972 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.143946 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-certs\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144049 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144103 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144192 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144217 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkrg7\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-kube-api-access-xkrg7\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144316 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144336 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144389 4768 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144456 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144498 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144532 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19aa1f3-5836-440b-bc7e-dfc10baf6511-logs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144607 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-scripts\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144643 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-scripts\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144713 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.144753 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-575fc\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-kube-api-access-575fc\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246607 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246661 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data\") pod 
\"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246701 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246736 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246760 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246783 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19aa1f3-5836-440b-bc7e-dfc10baf6511-logs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246814 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-scripts\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246834 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-scripts\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246869 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246892 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-575fc\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-kube-api-access-575fc\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246923 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-certs\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246947 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.246966 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.247005 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.247021 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkrg7\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-kube-api-access-xkrg7\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.247836 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e19aa1f3-5836-440b-bc7e-dfc10baf6511-logs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.250731 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-scripts\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.251845 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-certs\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.252087 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.252147 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.252480 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-scripts\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.255278 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.255572 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef812a88-c111-4283-b7ba-f90f3e946eec-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.255724 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.256626 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.259216 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.260173 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-config-data\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.263537 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkrg7\" (UniqueName: \"kubernetes.io/projected/e19aa1f3-5836-440b-bc7e-dfc10baf6511-kube-api-access-xkrg7\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.264070 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-575fc\" (UniqueName: \"kubernetes.io/projected/ef812a88-c111-4283-b7ba-f90f3e946eec-kube-api-access-575fc\") pod \"cloudkitty-proc-0\" (UID: \"ef812a88-c111-4283-b7ba-f90f3e946eec\") " pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.269117 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e19aa1f3-5836-440b-bc7e-dfc10baf6511-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"e19aa1f3-5836-440b-bc7e-dfc10baf6511\") " pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.362755 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.393114 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.555250 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d103c3d-bc24-441d-a619-c02dd3be204f" path="/var/lib/kubelet/pods/3d103c3d-bc24-441d-a619-c02dd3be204f/volumes" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.556199 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f24e895-8599-4764-9cbd-30a74bad2423" path="/var/lib/kubelet/pods/4f24e895-8599-4764-9cbd-30a74bad2423/volumes" Dec 03 16:46:49 crc kubenswrapper[4768]: I1203 16:46:49.866712 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 03 16:46:50 crc kubenswrapper[4768]: W1203 16:46:50.050092 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode19aa1f3_5836_440b_bc7e_dfc10baf6511.slice/crio-7e6d4f51fae9edb84268028baf40185de2bb9b3d5e3f8903debe1bb73445cb84 WatchSource:0}: Error finding container 7e6d4f51fae9edb84268028baf40185de2bb9b3d5e3f8903debe1bb73445cb84: Status 404 returned error can't find the container with id 7e6d4f51fae9edb84268028baf40185de2bb9b3d5e3f8903debe1bb73445cb84 Dec 03 16:46:50 crc kubenswrapper[4768]: I1203 16:46:50.485792 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 03 16:46:50 crc kubenswrapper[4768]: W1203 16:46:50.491202 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef812a88_c111_4283_b7ba_f90f3e946eec.slice/crio-aace06a0cbf62bfe55fc92b8ee64dd6f467cff6866b9bf4772479c6d11d92f7d WatchSource:0}: Error finding container aace06a0cbf62bfe55fc92b8ee64dd6f467cff6866b9bf4772479c6d11d92f7d: Status 404 returned error can't find the container with id aace06a0cbf62bfe55fc92b8ee64dd6f467cff6866b9bf4772479c6d11d92f7d Dec 03 16:46:50 crc kubenswrapper[4768]: I1203 16:46:50.843410 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"ef812a88-c111-4283-b7ba-f90f3e946eec","Type":"ContainerStarted","Data":"aace06a0cbf62bfe55fc92b8ee64dd6f467cff6866b9bf4772479c6d11d92f7d"} Dec 03 16:46:50 crc kubenswrapper[4768]: I1203 16:46:50.845271 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"e19aa1f3-5836-440b-bc7e-dfc10baf6511","Type":"ContainerStarted","Data":"7e6d4f51fae9edb84268028baf40185de2bb9b3d5e3f8903debe1bb73445cb84"} Dec 03 16:46:51 crc kubenswrapper[4768]: I1203 16:46:51.532628 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:46:51 crc kubenswrapper[4768]: E1203 16:46:51.533022 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:46:51 crc kubenswrapper[4768]: I1203 16:46:51.857584 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"e19aa1f3-5836-440b-bc7e-dfc10baf6511","Type":"ContainerStarted","Data":"8611d445dfebcb1c15220f279c449eac32c3ebc679fa902ab575da6106916ce9"} Dec 03 16:46:53 crc 
kubenswrapper[4768]: I1203 16:46:53.875640 4768 generic.go:334] "Generic (PLEG): container finished" podID="7f83f074-b1a6-4d38-8a36-a6335766064f" containerID="506332fdd300d201bc3d2728701d456a81b52d888b869b2d15c8a9b2e1d574be" exitCode=0 Dec 03 16:46:53 crc kubenswrapper[4768]: I1203 16:46:53.875780 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7f83f074-b1a6-4d38-8a36-a6335766064f","Type":"ContainerDied","Data":"506332fdd300d201bc3d2728701d456a81b52d888b869b2d15c8a9b2e1d574be"} Dec 03 16:46:53 crc kubenswrapper[4768]: I1203 16:46:53.878126 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"e19aa1f3-5836-440b-bc7e-dfc10baf6511","Type":"ContainerStarted","Data":"e95a503665f3e96595c8ba7e9534e47f57957b4e5c5113babad177632fca4485"} Dec 03 16:46:53 crc kubenswrapper[4768]: I1203 16:46:53.880655 4768 generic.go:334] "Generic (PLEG): container finished" podID="e1ebf32c-184a-46da-8f0e-e955fb1fa5e8" containerID="bdc5add6641b503e8bc484ae0191f580785df109e72287006e3e9ee95e24769c" exitCode=0 Dec 03 16:46:53 crc kubenswrapper[4768]: I1203 16:46:53.880687 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8","Type":"ContainerDied","Data":"bdc5add6641b503e8bc484ae0191f580785df109e72287006e3e9ee95e24769c"} Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.923018 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7f83f074-b1a6-4d38-8a36-a6335766064f","Type":"ContainerStarted","Data":"4f47fc89244f63c29eed9f0c3eac1b5490533bdc424754304ba43749d0d49f74"} Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.924128 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.928499 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e1ebf32c-184a-46da-8f0e-e955fb1fa5e8","Type":"ContainerStarted","Data":"ce68ed4ae14e4ab81dfb624678f10f5e69290c2537797d37439a42e2b90a7e39"} Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.928777 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.928974 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.967942 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=44.967925042 podStartE2EDuration="44.967925042s" podCreationTimestamp="2025-12-03 16:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:55.948544116 +0000 UTC m=+1712.867880539" watchObservedRunningTime="2025-12-03 16:46:55.967925042 +0000 UTC m=+1712.887261465" Dec 03 16:46:55 crc kubenswrapper[4768]: I1203 16:46:55.996396 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=47.996371244 podStartE2EDuration="47.996371244s" podCreationTimestamp="2025-12-03 16:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:55.979803492 +0000 UTC 
m=+1712.899139915" watchObservedRunningTime="2025-12-03 16:46:55.996371244 +0000 UTC m=+1712.915707677" Dec 03 16:46:56 crc kubenswrapper[4768]: I1203 16:46:56.015724 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=7.015702739 podStartE2EDuration="7.015702739s" podCreationTimestamp="2025-12-03 16:46:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 16:46:56.001989091 +0000 UTC m=+1712.921325514" watchObservedRunningTime="2025-12-03 16:46:56.015702739 +0000 UTC m=+1712.935039162" Dec 03 16:46:56 crc kubenswrapper[4768]: I1203 16:46:56.941345 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"ef812a88-c111-4283-b7ba-f90f3e946eec","Type":"ContainerStarted","Data":"e40267f3beb218d316aff5599110a5fec54f1cf14dcbe8c8e37689e5230ca65b"} Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.505682 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9"] Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.507293 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.509504 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.510439 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.511563 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.511975 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.524833 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.524907 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.524927 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vhnc\" (UniqueName: \"kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.525046 4768 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.558166 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9"] Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.627015 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.627087 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.627110 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vhnc\" (UniqueName: \"kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.627235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.632631 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.633009 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.635577 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.641478 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vhnc\" (UniqueName: \"kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:57 crc kubenswrapper[4768]: I1203 16:46:57.848227 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:46:58 crc kubenswrapper[4768]: I1203 16:46:58.005430 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=4.356611842 podStartE2EDuration="10.005416316s" podCreationTimestamp="2025-12-03 16:46:48 +0000 UTC" firstStartedPulling="2025-12-03 16:46:50.495036391 +0000 UTC m=+1707.414372824" lastFinishedPulling="2025-12-03 16:46:56.143840875 +0000 UTC m=+1713.063177298" observedRunningTime="2025-12-03 16:46:58.001795082 +0000 UTC m=+1714.921131505" watchObservedRunningTime="2025-12-03 16:46:58.005416316 +0000 UTC m=+1714.924752739" Dec 03 16:46:58 crc kubenswrapper[4768]: I1203 16:46:58.682690 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9"] Dec 03 16:46:58 crc kubenswrapper[4768]: W1203 16:46:58.683635 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab27f0c2_92c5_4271_89a0_3faef991d57e.slice/crio-a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0 WatchSource:0}: Error finding container a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0: Status 404 returned error can't find the container with id a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0 Dec 03 16:46:58 crc kubenswrapper[4768]: I1203 16:46:58.973917 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" event={"ID":"ab27f0c2-92c5-4271-89a0-3faef991d57e","Type":"ContainerStarted","Data":"a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0"} Dec 03 16:47:02 crc kubenswrapper[4768]: I1203 16:47:02.532279 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:47:02 crc kubenswrapper[4768]: E1203 16:47:02.533258 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:47:09 crc kubenswrapper[4768]: I1203 16:47:09.153248 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e1ebf32c-184a-46da-8f0e-e955fb1fa5e8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.236:5671: connect: connection refused" Dec 03 16:47:11 crc kubenswrapper[4768]: I1203 16:47:11.857293 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" 
podUID="7f83f074-b1a6-4d38-8a36-a6335766064f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.237:5671: connect: connection refused" Dec 03 16:47:13 crc kubenswrapper[4768]: I1203 16:47:13.541385 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:47:13 crc kubenswrapper[4768]: E1203 16:47:13.541845 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:47:19 crc kubenswrapper[4768]: I1203 16:47:19.151397 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="e1ebf32c-184a-46da-8f0e-e955fb1fa5e8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.236:5671: connect: connection refused" Dec 03 16:47:21 crc kubenswrapper[4768]: I1203 16:47:21.853136 4768 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="7f83f074-b1a6-4d38-8a36-a6335766064f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.237:5671: connect: connection refused" Dec 03 16:47:22 crc kubenswrapper[4768]: E1203 16:47:22.951666 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest" Dec 03 16:47:22 crc kubenswrapper[4768]: E1203 16:47:22.952113 4768 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 03 16:47:22 crc kubenswrapper[4768]: container &Container{Name:repo-setup-edpm-deployment-openstack-edpm-ipam,Image:quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest,Command:[],Args:[ansible-runner run /runner -p playbook.yaml -i repo-setup-edpm-deployment-openstack-edpm-ipam],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:ANSIBLE_VERBOSITY,Value:2,ValueFrom:nil,},EnvVar{Name:RUNNER_PLAYBOOK,Value: Dec 03 16:47:22 crc kubenswrapper[4768]: - hosts: all Dec 03 16:47:22 crc kubenswrapper[4768]: strategy: linear Dec 03 16:47:22 crc kubenswrapper[4768]: tasks: Dec 03 16:47:22 crc kubenswrapper[4768]: - name: Enable podified-repos Dec 03 16:47:22 crc kubenswrapper[4768]: become: true Dec 03 16:47:22 crc kubenswrapper[4768]: ansible.builtin.shell: | Dec 03 16:47:22 crc kubenswrapper[4768]: set -euxo pipefail Dec 03 16:47:22 crc kubenswrapper[4768]: pushd /var/tmp Dec 03 16:47:22 crc kubenswrapper[4768]: curl -sL https://github.com/openstack-k8s-operators/repo-setup/archive/refs/heads/main.tar.gz | tar -xz Dec 03 16:47:22 crc kubenswrapper[4768]: pushd repo-setup-main Dec 03 16:47:22 crc kubenswrapper[4768]: python3 -m venv ./venv Dec 03 16:47:22 crc kubenswrapper[4768]: PBR_VERSION=0.0.0 ./venv/bin/pip install ./ Dec 03 16:47:22 crc kubenswrapper[4768]: ./venv/bin/repo-setup current-podified -b antelope Dec 03 16:47:22 crc kubenswrapper[4768]: popd Dec 03 16:47:22 crc kubenswrapper[4768]: rm -rf repo-setup-main Dec 03 16:47:22 crc kubenswrapper[4768]: Dec 03 16:47:22 crc kubenswrapper[4768]: Dec 03 16:47:22 crc kubenswrapper[4768]: ,ValueFrom:nil,},EnvVar{Name:RUNNER_EXTRA_VARS,Value: Dec 03 16:47:22 crc kubenswrapper[4768]: edpm_override_hosts: 
openstack-edpm-ipam Dec 03 16:47:22 crc kubenswrapper[4768]: edpm_service_type: repo-setup Dec 03 16:47:22 crc kubenswrapper[4768]: Dec 03 16:47:22 crc kubenswrapper[4768]: Dec 03 16:47:22 crc kubenswrapper[4768]: ,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:repo-setup-combined-ca-bundle,ReadOnly:false,MountPath:/var/lib/openstack/cacerts/repo-setup,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/runner/env/ssh_key,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:inventory,ReadOnly:false,MountPath:/runner/inventory/hosts,SubPath:inventory,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9vhnc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:openstack-aee-default-env,},Optional:*true,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9_openstack(ab27f0c2-92c5-4271-89a0-3faef991d57e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Dec 03 16:47:22 crc kubenswrapper[4768]: > logger="UnhandledError" Dec 03 16:47:22 crc kubenswrapper[4768]: E1203 16:47:22.953307 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" podUID="ab27f0c2-92c5-4271-89a0-3faef991d57e" Dec 03 16:47:23 crc kubenswrapper[4768]: E1203 16:47:23.257388 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"repo-setup-edpm-deployment-openstack-edpm-ipam\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-ansibleee-runner:latest\\\"\"" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" podUID="ab27f0c2-92c5-4271-89a0-3faef991d57e" Dec 03 16:47:26 crc kubenswrapper[4768]: I1203 16:47:26.213180 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 03 16:47:28 crc kubenswrapper[4768]: I1203 16:47:28.532831 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:47:28 crc kubenswrapper[4768]: E1203 16:47:28.533545 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:47:29 crc kubenswrapper[4768]: I1203 16:47:29.153909 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 03 16:47:31 crc kubenswrapper[4768]: I1203 16:47:31.853219 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 03 16:47:37 crc kubenswrapper[4768]: I1203 16:47:37.255405 4768 scope.go:117] "RemoveContainer" containerID="56fe90609af053750cebb497d51d46888c5dc58d2760b4ac9cc2d1e0156f85e3" Dec 03 16:47:38 crc kubenswrapper[4768]: I1203 16:47:38.215176 4768 scope.go:117] "RemoveContainer" containerID="1df1242371a73fb6c9edfea3ec7a2033e6c41cf783b6a3ac2603d23ebc68a712" Dec 03 16:47:42 crc kubenswrapper[4768]: I1203 16:47:42.533344 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:47:42 crc kubenswrapper[4768]: E1203 16:47:42.534531 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:47:55 crc kubenswrapper[4768]: I1203 16:47:55.702245 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:47:56 crc kubenswrapper[4768]: I1203 16:47:56.534924 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:47:56 crc kubenswrapper[4768]: E1203 16:47:56.535721 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:47:56 crc kubenswrapper[4768]: I1203 16:47:56.697461 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" event={"ID":"ab27f0c2-92c5-4271-89a0-3faef991d57e","Type":"ContainerStarted","Data":"782969904a626f38ab8465441cdcfa74fdc36bd0ef9341c3e9926cda21abd699"} Dec 03 16:47:56 crc kubenswrapper[4768]: I1203 16:47:56.731026 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" podStartSLOduration=2.71730473 podStartE2EDuration="59.731005388s" podCreationTimestamp="2025-12-03 16:46:57 +0000 UTC" firstStartedPulling="2025-12-03 16:46:58.685850974 +0000 UTC m=+1715.605187397" lastFinishedPulling="2025-12-03 16:47:55.699551612 +0000 UTC m=+1772.618888055" observedRunningTime="2025-12-03 16:47:56.718933744 +0000 UTC m=+1773.638270187" watchObservedRunningTime="2025-12-03 16:47:56.731005388 +0000 UTC m=+1773.650341811" Dec 03 16:48:07 crc kubenswrapper[4768]: I1203 16:48:07.533230 4768 scope.go:117] "RemoveContainer" 
containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:48:07 crc kubenswrapper[4768]: E1203 16:48:07.534125 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:48:14 crc kubenswrapper[4768]: I1203 16:48:14.922189 4768 generic.go:334] "Generic (PLEG): container finished" podID="ab27f0c2-92c5-4271-89a0-3faef991d57e" containerID="782969904a626f38ab8465441cdcfa74fdc36bd0ef9341c3e9926cda21abd699" exitCode=0 Dec 03 16:48:14 crc kubenswrapper[4768]: I1203 16:48:14.922312 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" event={"ID":"ab27f0c2-92c5-4271-89a0-3faef991d57e","Type":"ContainerDied","Data":"782969904a626f38ab8465441cdcfa74fdc36bd0ef9341c3e9926cda21abd699"} Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.540481 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.693407 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vhnc\" (UniqueName: \"kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc\") pod \"ab27f0c2-92c5-4271-89a0-3faef991d57e\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.693559 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory\") pod \"ab27f0c2-92c5-4271-89a0-3faef991d57e\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.693774 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle\") pod \"ab27f0c2-92c5-4271-89a0-3faef991d57e\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.693894 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key\") pod \"ab27f0c2-92c5-4271-89a0-3faef991d57e\" (UID: \"ab27f0c2-92c5-4271-89a0-3faef991d57e\") " Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.700570 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc" (OuterVolumeSpecName: "kube-api-access-9vhnc") pod "ab27f0c2-92c5-4271-89a0-3faef991d57e" (UID: "ab27f0c2-92c5-4271-89a0-3faef991d57e"). InnerVolumeSpecName "kube-api-access-9vhnc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.728809 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ab27f0c2-92c5-4271-89a0-3faef991d57e" (UID: "ab27f0c2-92c5-4271-89a0-3faef991d57e"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.736789 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory" (OuterVolumeSpecName: "inventory") pod "ab27f0c2-92c5-4271-89a0-3faef991d57e" (UID: "ab27f0c2-92c5-4271-89a0-3faef991d57e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.746921 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ab27f0c2-92c5-4271-89a0-3faef991d57e" (UID: "ab27f0c2-92c5-4271-89a0-3faef991d57e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.797297 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vhnc\" (UniqueName: \"kubernetes.io/projected/ab27f0c2-92c5-4271-89a0-3faef991d57e-kube-api-access-9vhnc\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.797343 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.797356 4768 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.797368 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ab27f0c2-92c5-4271-89a0-3faef991d57e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.946096 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" event={"ID":"ab27f0c2-92c5-4271-89a0-3faef991d57e","Type":"ContainerDied","Data":"a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0"} Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.946169 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a151eba4b05160cc2574829a8ef5464c84a78cf6ecbb67068a001717ea1d34b0" Dec 03 16:48:16 crc kubenswrapper[4768]: I1203 16:48:16.946251 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.047820 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg"] Dec 03 16:48:17 crc kubenswrapper[4768]: E1203 16:48:17.048437 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab27f0c2-92c5-4271-89a0-3faef991d57e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.048464 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab27f0c2-92c5-4271-89a0-3faef991d57e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.048751 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab27f0c2-92c5-4271-89a0-3faef991d57e" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.049849 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.053735 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.054085 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.054266 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.056942 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.061106 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg"] Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.103848 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.104029 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdx9k\" (UniqueName: \"kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.104223 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.205819 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.205976 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.206003 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdx9k\" (UniqueName: \"kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.211367 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.214363 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.225893 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdx9k\" (UniqueName: \"kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-c9hxg\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.376126 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:17 crc kubenswrapper[4768]: I1203 16:48:17.968672 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg"] Dec 03 16:48:18 crc kubenswrapper[4768]: I1203 16:48:18.531664 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:48:18 crc kubenswrapper[4768]: E1203 16:48:18.532226 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:48:19 crc kubenswrapper[4768]: I1203 16:48:19.461514 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" event={"ID":"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6","Type":"ContainerStarted","Data":"3b368ab32a6380ffdc889f0d8c3c1f58c8ba89367887e27c47fa8238fc84a1d6"} Dec 03 16:48:19 crc kubenswrapper[4768]: I1203 16:48:19.461859 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" event={"ID":"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6","Type":"ContainerStarted","Data":"b7e1df7198e30635921931901f4f478539b360d042b7aa78737182bf028889b6"} Dec 03 16:48:19 crc kubenswrapper[4768]: I1203 16:48:19.518610 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" podStartSLOduration=2.093457013 podStartE2EDuration="2.518574051s" podCreationTimestamp="2025-12-03 16:48:17 +0000 UTC" firstStartedPulling="2025-12-03 16:48:17.976402206 +0000 UTC m=+1794.895738639" lastFinishedPulling="2025-12-03 16:48:18.401519214 +0000 UTC m=+1795.320855677" observedRunningTime="2025-12-03 16:48:19.491309993 +0000 UTC m=+1796.410646416" watchObservedRunningTime="2025-12-03 16:48:19.518574051 +0000 UTC m=+1796.437910474" Dec 03 16:48:21 crc kubenswrapper[4768]: I1203 16:48:21.489321 4768 generic.go:334] "Generic (PLEG): container finished" podID="2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" containerID="3b368ab32a6380ffdc889f0d8c3c1f58c8ba89367887e27c47fa8238fc84a1d6" exitCode=0 Dec 03 16:48:21 crc kubenswrapper[4768]: I1203 16:48:21.489440 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" event={"ID":"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6","Type":"ContainerDied","Data":"3b368ab32a6380ffdc889f0d8c3c1f58c8ba89367887e27c47fa8238fc84a1d6"} Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.049137 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.205892 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory\") pod \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.206135 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdx9k\" (UniqueName: \"kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k\") pod \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.206167 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") pod \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.211396 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k" (OuterVolumeSpecName: "kube-api-access-bdx9k") pod "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" (UID: "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6"). InnerVolumeSpecName "kube-api-access-bdx9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:48:23 crc kubenswrapper[4768]: E1203 16:48:23.239019 4768 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key podName:2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6 nodeName:}" failed. No retries permitted until 2025-12-03 16:48:23.738992069 +0000 UTC m=+1800.658328492 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key") pod "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" (UID: "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6") : error deleting /var/lib/kubelet/pods/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6/volume-subpaths: remove /var/lib/kubelet/pods/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6/volume-subpaths: no such file or directory Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.241571 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory" (OuterVolumeSpecName: "inventory") pod "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" (UID: "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.309159 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdx9k\" (UniqueName: \"kubernetes.io/projected/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-kube-api-access-bdx9k\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.309196 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.524969 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" event={"ID":"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6","Type":"ContainerDied","Data":"b7e1df7198e30635921931901f4f478539b360d042b7aa78737182bf028889b6"} Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.525018 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7e1df7198e30635921931901f4f478539b360d042b7aa78737182bf028889b6" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.525048 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-c9hxg" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.722562 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4"] Dec 03 16:48:23 crc kubenswrapper[4768]: E1203 16:48:23.723112 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.723135 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.723321 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.724099 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.747738 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4"] Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.831556 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") pod \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\" (UID: \"2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6\") " Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.832119 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.832209 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.832241 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxbc2\" (UniqueName: \"kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.832266 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.849358 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6" (UID: "2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.934057 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.934130 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxbc2\" (UniqueName: \"kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.934353 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.934530 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.934659 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.937786 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.938263 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.939084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:23 crc kubenswrapper[4768]: I1203 16:48:23.961770 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxbc2\" (UniqueName: 
\"kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:24 crc kubenswrapper[4768]: I1203 16:48:24.050780 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" Dec 03 16:48:24 crc kubenswrapper[4768]: I1203 16:48:24.582878 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4"] Dec 03 16:48:25 crc kubenswrapper[4768]: I1203 16:48:25.550417 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" event={"ID":"76882892-8177-4627-a611-f9e6e75d9829","Type":"ContainerStarted","Data":"b5c345bf3ec14a5eaffa1a9050142d2e0a234081e22a1760b4bb2f17a92fb082"} Dec 03 16:48:25 crc kubenswrapper[4768]: I1203 16:48:25.550698 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" event={"ID":"76882892-8177-4627-a611-f9e6e75d9829","Type":"ContainerStarted","Data":"29a1cb1ec28dd58dd51f661ffff96e9cd2c06f4cd0c8a86b0d606941956e6dec"} Dec 03 16:48:25 crc kubenswrapper[4768]: I1203 16:48:25.570961 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" podStartSLOduration=2.119061599 podStartE2EDuration="2.57093953s" podCreationTimestamp="2025-12-03 16:48:23 +0000 UTC" firstStartedPulling="2025-12-03 16:48:24.590426386 +0000 UTC m=+1801.509762809" lastFinishedPulling="2025-12-03 16:48:25.042304297 +0000 UTC m=+1801.961640740" observedRunningTime="2025-12-03 16:48:25.567703216 +0000 UTC m=+1802.487039669" watchObservedRunningTime="2025-12-03 16:48:25.57093953 +0000 UTC m=+1802.490275953" Dec 03 16:48:33 crc kubenswrapper[4768]: I1203 16:48:33.540869 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:48:33 crc kubenswrapper[4768]: E1203 16:48:33.541811 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.218919 4768 scope.go:117] "RemoveContainer" containerID="1da409058f9f17fce4c22cb57c1020954041b02a6dc3c622425b81067f82ca75" Dec 03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.249208 4768 scope.go:117] "RemoveContainer" containerID="4af4df7fb7c9fd042e43562b990cf68a90be98ac6394573f270f91f51b285261" Dec 03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.284245 4768 scope.go:117] "RemoveContainer" containerID="6897e6021f9255f3f9879dacaa346be1f440258d4c1c009e12601d680bb41605" Dec 03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.328991 4768 scope.go:117] "RemoveContainer" containerID="416b952057d30e2fbed49476298cbb7ce11addd9ae2a5190aeffb30029cc5e74" Dec 03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.366373 4768 scope.go:117] "RemoveContainer" containerID="ef785d54a0a249f9a1f47fa69c260b2f8fadcddc71167f35b1852fd23c897f8b" Dec 
03 16:48:44 crc kubenswrapper[4768]: I1203 16:48:44.531903 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:48:44 crc kubenswrapper[4768]: E1203 16:48:44.532722 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:48:57 crc kubenswrapper[4768]: I1203 16:48:57.531798 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:48:57 crc kubenswrapper[4768]: E1203 16:48:57.532668 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:49:11 crc kubenswrapper[4768]: I1203 16:49:11.533282 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:49:11 crc kubenswrapper[4768]: E1203 16:49:11.534467 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:49:25 crc kubenswrapper[4768]: I1203 16:49:25.532959 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:49:25 crc kubenswrapper[4768]: E1203 16:49:25.534073 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:49:37 crc kubenswrapper[4768]: I1203 16:49:37.532654 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9" Dec 03 16:49:39 crc kubenswrapper[4768]: I1203 16:49:39.417000 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8"} Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.064324 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-49e4-account-create-update-v4lpm"] Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.074640 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-jc27s"] Dec 03 16:50:01 
crc kubenswrapper[4768]: I1203 16:50:01.086284 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-ac7a-account-create-update-p8klf"] Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.097265 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-49e4-account-create-update-v4lpm"] Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.106769 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-jc27s"] Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.115961 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-ac7a-account-create-update-p8klf"] Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.544099 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bc7eee1-578b-4091-8261-2f27ad2122cc" path="/var/lib/kubelet/pods/2bc7eee1-578b-4091-8261-2f27ad2122cc/volumes" Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.544903 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95e13608-2d5c-43c8-a443-715a32b7edda" path="/var/lib/kubelet/pods/95e13608-2d5c-43c8-a443-715a32b7edda/volumes" Dec 03 16:50:01 crc kubenswrapper[4768]: I1203 16:50:01.545679 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcf80a9b-b333-4ebb-b757-0306230722df" path="/var/lib/kubelet/pods/dcf80a9b-b333-4ebb-b757-0306230722df/volumes" Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.057658 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-9zmrs"] Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.069779 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-976f-account-create-update-gsp5b"] Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.083081 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nlld2"] Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.092319 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-9zmrs"] Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.102200 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nlld2"] Dec 03 16:50:02 crc kubenswrapper[4768]: I1203 16:50:02.111635 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-976f-account-create-update-gsp5b"] Dec 03 16:50:03 crc kubenswrapper[4768]: I1203 16:50:03.556940 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4" path="/var/lib/kubelet/pods/56dae5f4-91e5-47d5-a0c0-c3c93b9e7db4/volumes" Dec 03 16:50:03 crc kubenswrapper[4768]: I1203 16:50:03.561983 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad42c9c3-1853-43c3-a434-23d0889b8dd4" path="/var/lib/kubelet/pods/ad42c9c3-1853-43c3-a434-23d0889b8dd4/volumes" Dec 03 16:50:03 crc kubenswrapper[4768]: I1203 16:50:03.563273 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b709be2c-2e7f-4013-889f-cff78d262a56" path="/var/lib/kubelet/pods/b709be2c-2e7f-4013-889f-cff78d262a56/volumes" Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.036538 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-2z5jj"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.049268 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-gqbzb"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.060128 4768 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-9kgrw"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.069515 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-f95e-account-create-update-f4h7x"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.078372 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-2z5jj"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.087591 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-gqbzb"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.096699 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-9kgrw"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.105615 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-f95e-account-create-update-f4h7x"] Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.547731 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129" path="/var/lib/kubelet/pods/5f37bfbc-3d6a-4bc2-be68-bdbd1e6b2129/volumes" Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.548802 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1c3d427-12ff-4e36-bfaf-eebb41eb2c97" path="/var/lib/kubelet/pods/b1c3d427-12ff-4e36-bfaf-eebb41eb2c97/volumes" Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.551325 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcf11438-066f-4718-9f14-d19c5d998c5a" path="/var/lib/kubelet/pods/bcf11438-066f-4718-9f14-d19c5d998c5a/volumes" Dec 03 16:50:27 crc kubenswrapper[4768]: I1203 16:50:27.552707 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f14793fd-c763-44bd-b629-3426b1ccc605" path="/var/lib/kubelet/pods/f14793fd-c763-44bd-b629-3426b1ccc605/volumes" Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.041053 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-2a41-account-create-update-qz7zm"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.052759 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-5dfb-account-create-update-txwn7"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.064705 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-d268s"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.074808 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-2a41-account-create-update-qz7zm"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.085555 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-5dfb-account-create-update-txwn7"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.093799 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-d268s"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.101975 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-8124-account-create-update-n4jv6"] Dec 03 16:50:28 crc kubenswrapper[4768]: I1203 16:50:28.111847 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-8124-account-create-update-n4jv6"] Dec 03 16:50:29 crc kubenswrapper[4768]: I1203 16:50:29.544698 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31ba5ee1-82b2-4095-a92a-d3f5cd49482c" path="/var/lib/kubelet/pods/31ba5ee1-82b2-4095-a92a-d3f5cd49482c/volumes" 
Dec 03 16:50:29 crc kubenswrapper[4768]: I1203 16:50:29.545476 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4da4c050-3db3-42cc-9b36-e65917c8b977" path="/var/lib/kubelet/pods/4da4c050-3db3-42cc-9b36-e65917c8b977/volumes"
Dec 03 16:50:29 crc kubenswrapper[4768]: I1203 16:50:29.546047 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5f87cd-fa59-4074-b87c-3e3f5760ddb5" path="/var/lib/kubelet/pods/bc5f87cd-fa59-4074-b87c-3e3f5760ddb5/volumes"
Dec 03 16:50:29 crc kubenswrapper[4768]: I1203 16:50:29.546555 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e9447e-4441-4183-9511-4780a1af50d4" path="/var/lib/kubelet/pods/d4e9447e-4441-4183-9511-4780a1af50d4/volumes"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.550791 4768 scope.go:117] "RemoveContainer" containerID="08cdb5aeff4008b72e86d49c18e4a9d4e99fd4c474e123b5f47954964cbf8053"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.593504 4768 scope.go:117] "RemoveContainer" containerID="5c458684187373aa1d4e334cd9c7945cb719757aec798b5c74415d02f4bf83ee"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.661815 4768 scope.go:117] "RemoveContainer" containerID="7b2c7e5607a6e9c3f01cf02cad648a4eef7d15bc628b57b0eeb0ed1a65abb194"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.707469 4768 scope.go:117] "RemoveContainer" containerID="c7ac60d5fd14fa49377774b943f6dd3e1c35f927a07a9f362fd7899257aa9902"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.756131 4768 scope.go:117] "RemoveContainer" containerID="f4442a29392b02509c413d37901f8c61df6b820f28bde5e13123859318b27372"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.835630 4768 scope.go:117] "RemoveContainer" containerID="5b343baf0a51bb146381fa15c64630f494bdc03631eb71d9f37a17518745dd77"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.902396 4768 scope.go:117] "RemoveContainer" containerID="053d729556746c14f91ff5d6925566b663f5b9f392c85e91115f16e53e9bf848"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.930181 4768 scope.go:117] "RemoveContainer" containerID="b0fbd670114b36ce9f2697718d82351758e858795cfac8f1ca088d0a994444d0"
Dec 03 16:50:44 crc kubenswrapper[4768]: I1203 16:50:44.960247 4768 scope.go:117] "RemoveContainer" containerID="8ddf30bc6ba6df26a0b026aae4f9d4972ce25ed92e77fca2a7a7e0a4631e9448"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.396961 4768 scope.go:117] "RemoveContainer" containerID="528926e7c28d646c5d65cdf6d040ca17652d74ecadaff72ebeb8f3f23dd7b279"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.430118 4768 scope.go:117] "RemoveContainer" containerID="ce58dbd66c7da0fac5ca83d7a95f64b1353e2b6b22bf716ebc654bf20a1beac2"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.477550 4768 scope.go:117] "RemoveContainer" containerID="0346c176dc90077c526ec55f48c5438df729d085e71182ec99566d908fd39b00"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.503040 4768 scope.go:117] "RemoveContainer" containerID="a939e7a084def3cbe0569596f865278e89d78443ff2816aa7bc18a9b650f10dd"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.524375 4768 scope.go:117] "RemoveContainer" containerID="e73bda5eb724d99f05684f50b3a69bf527da823430998f0b3ba0af6925e769b4"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.560010 4768 scope.go:117] "RemoveContainer" containerID="9e7afe659c82eac06ee9678c1cf3d5d00652b9c957a9b9f70940eeb8cb72e7f5"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.647711 4768 scope.go:117] "RemoveContainer" containerID="3b06e68d57d4a33409a06a905f8b8c6979ab4a98da40194e7fb1dc86da094644"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.695909 4768 scope.go:117] "RemoveContainer" containerID="0638f152b05881f5b84702ffc7c29da99a2318d9a1e0500beed153f735980ead"
Dec 03 16:50:45 crc kubenswrapper[4768]: I1203 16:50:45.745451 4768 scope.go:117] "RemoveContainer" containerID="611a5bbb7bf89b1c4b8b8167dd4506a0883ebfc17dbd4b5252a7991d61348458"
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.053280 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-6j9c8"]
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.068521 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-zqcnl"]
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.079143 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-zqcnl"]
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.087436 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-6j9c8"]
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.543992 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a7a3873-e2cc-41e3-9151-715913ade3b2" path="/var/lib/kubelet/pods/2a7a3873-e2cc-41e3-9151-715913ade3b2/volumes"
Dec 03 16:50:51 crc kubenswrapper[4768]: I1203 16:50:51.544693 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="495fd475-f8cd-4fd9-86d4-cdf7765e7ad6" path="/var/lib/kubelet/pods/495fd475-f8cd-4fd9-86d4-cdf7765e7ad6/volumes"
Dec 03 16:51:29 crc kubenswrapper[4768]: I1203 16:51:29.045933 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-6fxl6"]
Dec 03 16:51:29 crc kubenswrapper[4768]: I1203 16:51:29.059962 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-6fxl6"]
Dec 03 16:51:29 crc kubenswrapper[4768]: I1203 16:51:29.545003 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3088df5-8818-432a-997e-d6b6b2d7daca" path="/var/lib/kubelet/pods/f3088df5-8818-432a-997e-d6b6b2d7daca/volumes"
Dec 03 16:51:46 crc kubenswrapper[4768]: I1203 16:51:46.111840 4768 scope.go:117] "RemoveContainer" containerID="cdc3d205bae4c7f868ae3fa4761b22816cd49c390585fab5ed2111cb0588e1a9"
Dec 03 16:51:46 crc kubenswrapper[4768]: I1203 16:51:46.172372 4768 scope.go:117] "RemoveContainer" containerID="d50da033304a43f898c6f04a390690a054e0a05f712a7c3a1c7897acc2425d43"
Dec 03 16:51:46 crc kubenswrapper[4768]: I1203 16:51:46.233701 4768 scope.go:117] "RemoveContainer" containerID="4e855590d6d14758bee6f9fe4309487b31c6ec27c7271add7ea5f20efbb3de7e"
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.057562 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xchkb"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.069933 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-t54sm"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.080488 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-5vkb8"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.089688 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-t54sm"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.098879 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-5vkb8"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.109004 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xchkb"]
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.552136 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5902e376-547e-485a-a963-0c3bc5c5cfe7" path="/var/lib/kubelet/pods/5902e376-547e-485a-a963-0c3bc5c5cfe7/volumes"
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.552923 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ce5b640-44a6-4924-9f9c-d39b9247c4b3" path="/var/lib/kubelet/pods/5ce5b640-44a6-4924-9f9c-d39b9247c4b3/volumes"
Dec 03 16:51:49 crc kubenswrapper[4768]: I1203 16:51:49.553426 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b9cb523-2eda-41a4-97de-745d2b5eb5b4" path="/var/lib/kubelet/pods/8b9cb523-2eda-41a4-97de-745d2b5eb5b4/volumes"
Dec 03 16:51:56 crc kubenswrapper[4768]: I1203 16:51:56.028899 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:51:56 crc kubenswrapper[4768]: I1203 16:51:56.029792 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:52:00 crc kubenswrapper[4768]: I1203 16:52:00.035064 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-mftv7"]
Dec 03 16:52:00 crc kubenswrapper[4768]: I1203 16:52:00.044694 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-mftv7"]
Dec 03 16:52:01 crc kubenswrapper[4768]: I1203 16:52:01.551962 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f19bd8f-a9f2-41de-b0f3-de08db42cf69" path="/var/lib/kubelet/pods/8f19bd8f-a9f2-41de-b0f3-de08db42cf69/volumes"
Dec 03 16:52:26 crc kubenswrapper[4768]: I1203 16:52:26.028303 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:52:26 crc kubenswrapper[4768]: I1203 16:52:26.029622 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:52:42 crc kubenswrapper[4768]: I1203 16:52:42.050242 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-vhh2l"]
Dec 03 16:52:42 crc kubenswrapper[4768]: I1203 16:52:42.061677 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-b2p9v"]
Dec 03 16:52:42 crc kubenswrapper[4768]: I1203 16:52:42.072623 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-vhh2l"]
Dec 03 16:52:42 crc kubenswrapper[4768]: I1203 16:52:42.081722 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-b2p9v"]
Dec 03 16:52:43 crc kubenswrapper[4768]: I1203 16:52:43.030089 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-b1ad-account-create-update-64z2j"]
Dec 03 16:52:43 crc kubenswrapper[4768]: I1203 16:52:43.045343 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-b1ad-account-create-update-64z2j"]
Dec 03 16:52:43 crc kubenswrapper[4768]: I1203 16:52:43.544207 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d848bbe-f5d1-4661-b0fe-77acbc5de436" path="/var/lib/kubelet/pods/9d848bbe-f5d1-4661-b0fe-77acbc5de436/volumes"
Dec 03 16:52:43 crc kubenswrapper[4768]: I1203 16:52:43.544806 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d76511f3-f6aa-4505-9721-68c83831c1d4" path="/var/lib/kubelet/pods/d76511f3-f6aa-4505-9721-68c83831c1d4/volumes"
Dec 03 16:52:43 crc kubenswrapper[4768]: I1203 16:52:43.545324 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead" path="/var/lib/kubelet/pods/ed3f86e6-6517-4b4d-9d1f-d1b43d5ebead/volumes"
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.040775 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-6ef0-account-create-update-hxsjh"]
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.055045 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-fff5-account-create-update-vdjxl"]
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.065065 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-xqmkm"]
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.072989 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-6ef0-account-create-update-hxsjh"]
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.080556 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-fff5-account-create-update-vdjxl"]
Dec 03 16:52:44 crc kubenswrapper[4768]: I1203 16:52:44.087882 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-xqmkm"]
Dec 03 16:52:45 crc kubenswrapper[4768]: I1203 16:52:45.544429 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45c7dcf6-60c1-4dce-b656-20da30e0414f" path="/var/lib/kubelet/pods/45c7dcf6-60c1-4dce-b656-20da30e0414f/volumes"
Dec 03 16:52:45 crc kubenswrapper[4768]: I1203 16:52:45.546414 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d1a50ae-6bca-4c21-a19c-840c488991fe" path="/var/lib/kubelet/pods/5d1a50ae-6bca-4c21-a19c-840c488991fe/volumes"
Dec 03 16:52:45 crc kubenswrapper[4768]: I1203 16:52:45.547212 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8db80e6-2ed4-434a-bb54-a0f7effe70b1" path="/var/lib/kubelet/pods/e8db80e6-2ed4-434a-bb54-a0f7effe70b1/volumes"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.363860 4768 scope.go:117] "RemoveContainer" containerID="8b60833b2aa30b920aafa424656808df04f5d2708a243131266aafccfdcd4c16"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.408111 4768 scope.go:117] "RemoveContainer" containerID="03dc37845418a82e1434111c1a55ca93ba22aeb38ca3d093d67b7a40a0de4628"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.465627 4768 scope.go:117] "RemoveContainer" containerID="5c8f67b79f1e6dbe203d4a7000ce360cfeb1bab82b13180d19dfad5f70e5d753"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.509991 4768 scope.go:117] "RemoveContainer" containerID="ab7b3d416f73b914ad908bba6975d6a8eba78deea5b704ec42ef64231a9df727"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.566473 4768 scope.go:117] "RemoveContainer" containerID="cd40e10cd09bf0e0dbdb8b5a036f119bc0d0a1e6f09e9f377800acb538039240"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.656460 4768 scope.go:117] "RemoveContainer" containerID="1445b7c1337018071e566eb6dc26208284d56add0481f71ebdef381028ffbdcb"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.683157 4768 scope.go:117] "RemoveContainer" containerID="74eb5b94d690b42190ddb837fa8a487652471f1a44d693bb112106868d6f37a2"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.712526 4768 scope.go:117] "RemoveContainer" containerID="26775996bd1017e47a5e02ab639819be4fe3d1b8d7bef66bcfe5fd60aa1c7361"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.746754 4768 scope.go:117] "RemoveContainer" containerID="c65f9c4ef6606fcdaf6c173b6a0f891dc99b70d2ab6b316549b2e25791b5086a"
Dec 03 16:52:46 crc kubenswrapper[4768]: I1203 16:52:46.764509 4768 scope.go:117] "RemoveContainer" containerID="71da2bdd8eea1e016ecec5d2b7394388807cb7a76a4c994a598283b866ce2983"
Dec 03 16:52:54 crc kubenswrapper[4768]: I1203 16:52:54.844090 4768 generic.go:334] "Generic (PLEG): container finished" podID="76882892-8177-4627-a611-f9e6e75d9829" containerID="b5c345bf3ec14a5eaffa1a9050142d2e0a234081e22a1760b4bb2f17a92fb082" exitCode=0
Dec 03 16:52:54 crc kubenswrapper[4768]: I1203 16:52:54.844183 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" event={"ID":"76882892-8177-4627-a611-f9e6e75d9829","Type":"ContainerDied","Data":"b5c345bf3ec14a5eaffa1a9050142d2e0a234081e22a1760b4bb2f17a92fb082"}
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.028418 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.028800 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.028844 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.029706 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.029761 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8" gracePeriod=600
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.569083 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.653536 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle\") pod \"76882892-8177-4627-a611-f9e6e75d9829\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") "
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.653653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory\") pod \"76882892-8177-4627-a611-f9e6e75d9829\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") "
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.653712 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxbc2\" (UniqueName: \"kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2\") pod \"76882892-8177-4627-a611-f9e6e75d9829\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") "
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.653812 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key\") pod \"76882892-8177-4627-a611-f9e6e75d9829\" (UID: \"76882892-8177-4627-a611-f9e6e75d9829\") "
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.659651 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2" (OuterVolumeSpecName: "kube-api-access-kxbc2") pod "76882892-8177-4627-a611-f9e6e75d9829" (UID: "76882892-8177-4627-a611-f9e6e75d9829"). InnerVolumeSpecName "kube-api-access-kxbc2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.663897 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "76882892-8177-4627-a611-f9e6e75d9829" (UID: "76882892-8177-4627-a611-f9e6e75d9829"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.696074 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory" (OuterVolumeSpecName: "inventory") pod "76882892-8177-4627-a611-f9e6e75d9829" (UID: "76882892-8177-4627-a611-f9e6e75d9829"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.697829 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76882892-8177-4627-a611-f9e6e75d9829" (UID: "76882892-8177-4627-a611-f9e6e75d9829"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.756568 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.756638 4768 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.756656 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76882892-8177-4627-a611-f9e6e75d9829-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.756668 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxbc2\" (UniqueName: \"kubernetes.io/projected/76882892-8177-4627-a611-f9e6e75d9829-kube-api-access-kxbc2\") on node \"crc\" DevicePath \"\""
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.864568 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4" event={"ID":"76882892-8177-4627-a611-f9e6e75d9829","Type":"ContainerDied","Data":"29a1cb1ec28dd58dd51f661ffff96e9cd2c06f4cd0c8a86b0d606941956e6dec"}
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.864992 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29a1cb1ec28dd58dd51f661ffff96e9cd2c06f4cd0c8a86b0d606941956e6dec"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.864583 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.867586 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8" exitCode=0
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.867643 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8"}
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.867671 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"}
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.867687 4768 scope.go:117] "RemoveContainer" containerID="6288fe25fcbac6d52127208e54587b0b24d53507a42e9289eacdf6daa321c7a9"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.956403 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"]
Dec 03 16:52:56 crc kubenswrapper[4768]: E1203 16:52:56.956819 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76882892-8177-4627-a611-f9e6e75d9829" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.956837 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="76882892-8177-4627-a611-f9e6e75d9829" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.957030 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="76882892-8177-4627-a611-f9e6e75d9829" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.957766 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.961904 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.962076 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.962120 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.962233 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 03 16:52:56 crc kubenswrapper[4768]: I1203 16:52:56.965972 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"]
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.063251 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.063306 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhbgw\" (UniqueName: \"kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.063337 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.165910 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.166322 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhbgw\" (UniqueName: \"kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.166372 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.171363 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.173277 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.184885 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhbgw\" (UniqueName: \"kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.274194 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.821555 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t"]
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.824933 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 16:52:57 crc kubenswrapper[4768]: I1203 16:52:57.881704 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" event={"ID":"91a0e247-aab8-40b9-83e3-687d7f6a5927","Type":"ContainerStarted","Data":"db9c51e2572f3e734c421baae9247017c80a4760dfb830ff13212b24823dabf0"}
Dec 03 16:53:01 crc kubenswrapper[4768]: I1203 16:53:01.923925 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" event={"ID":"91a0e247-aab8-40b9-83e3-687d7f6a5927","Type":"ContainerStarted","Data":"378ce88162936bcd235b6bc5dc8619ed343cd3de216f3029b040240549a97712"}
Dec 03 16:53:01 crc kubenswrapper[4768]: I1203 16:53:01.951514 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" podStartSLOduration=3.210516859 podStartE2EDuration="5.951491262s" podCreationTimestamp="2025-12-03 16:52:56 +0000 UTC" firstStartedPulling="2025-12-03 16:52:57.82466459 +0000 UTC m=+2074.744001013" lastFinishedPulling="2025-12-03 16:53:00.565638993 +0000 UTC m=+2077.484975416" observedRunningTime="2025-12-03 16:53:01.944135936 +0000 UTC m=+2078.863472399" watchObservedRunningTime="2025-12-03 16:53:01.951491262 +0000 UTC m=+2078.870827695"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.569669 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-928kt"]
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.572933 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.592125 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-928kt"]
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.666611 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.666914 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.667027 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.769014 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.769350 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.769406 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.769844 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.770085 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.792884 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc\") pod \"redhat-operators-928kt\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:28 crc kubenswrapper[4768]: I1203 16:53:28.891538 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:29 crc kubenswrapper[4768]: I1203 16:53:29.415841 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-928kt"]
Dec 03 16:53:30 crc kubenswrapper[4768]: I1203 16:53:30.226004 4768 generic.go:334] "Generic (PLEG): container finished" podID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerID="16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25" exitCode=0
Dec 03 16:53:30 crc kubenswrapper[4768]: I1203 16:53:30.226081 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerDied","Data":"16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25"}
Dec 03 16:53:30 crc kubenswrapper[4768]: I1203 16:53:30.226286 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerStarted","Data":"b9de211614ad8773a06624b61e51fe9676e54e809a5310458a47f88fd49fbfc7"}
Dec 03 16:53:31 crc kubenswrapper[4768]: I1203 16:53:31.240393 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerStarted","Data":"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df"}
Dec 03 16:53:36 crc kubenswrapper[4768]: I1203 16:53:36.291870 4768 generic.go:334] "Generic (PLEG): container finished" podID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerID="908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df" exitCode=0
Dec 03 16:53:36 crc kubenswrapper[4768]: I1203 16:53:36.292391 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerDied","Data":"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df"}
Dec 03 16:53:42 crc kubenswrapper[4768]: I1203 16:53:42.291783 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="metallb-system/controller-f8648f98b-sbbq7" podUID="8bb39058-0f85-42fe-884e-f7ea6e389a1e" containerName="controller" probeResult="failure" output="Get \"http://10.217.0.74:29150/metrics\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 03 16:53:47 crc kubenswrapper[4768]: I1203 16:53:47.042433 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6x6hp"]
Dec 03 16:53:47 crc kubenswrapper[4768]: I1203 16:53:47.053441 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6x6hp"]
Dec 03 16:53:47 crc kubenswrapper[4768]: I1203 16:53:47.422930 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerStarted","Data":"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82"}
Dec 03 16:53:47 crc kubenswrapper[4768]: I1203 16:53:47.452242 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-928kt" podStartSLOduration=2.993745345 podStartE2EDuration="19.452222581s" podCreationTimestamp="2025-12-03 16:53:28 +0000 UTC" firstStartedPulling="2025-12-03 16:53:30.227581305 +0000 UTC m=+2107.146917728" lastFinishedPulling="2025-12-03 16:53:46.686058541 +0000 UTC m=+2123.605394964" observedRunningTime="2025-12-03 16:53:47.450495897 +0000 UTC m=+2124.369832340" watchObservedRunningTime="2025-12-03 16:53:47.452222581 +0000 UTC m=+2124.371559004"
Dec 03 16:53:47 crc kubenswrapper[4768]: I1203 16:53:47.544130 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfb313e1-4e5d-40b8-a882-82239fe7ae17" path="/var/lib/kubelet/pods/cfb313e1-4e5d-40b8-a882-82239fe7ae17/volumes"
Dec 03 16:53:48 crc kubenswrapper[4768]: I1203 16:53:48.891799 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:48 crc kubenswrapper[4768]: I1203 16:53:48.892208 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:49 crc kubenswrapper[4768]: I1203 16:53:49.951823 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-928kt" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="registry-server" probeResult="failure" output=<
Dec 03 16:53:49 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s
Dec 03 16:53:49 crc kubenswrapper[4768]: >
Dec 03 16:53:58 crc kubenswrapper[4768]: I1203 16:53:58.970350 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:59 crc kubenswrapper[4768]: I1203 16:53:59.030804 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-928kt"
Dec 03 16:53:59 crc kubenswrapper[4768]: I1203 16:53:59.773136 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-928kt"]
Dec 03 16:54:00 crc kubenswrapper[4768]: I1203 16:54:00.578003 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-928kt" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="registry-server" containerID="cri-o://061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82" gracePeriod=2
Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.275298 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-928kt" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.447312 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content\") pod \"07c8f093-5cef-4491-9aa6-d3efb9b06616\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.447730 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities\") pod \"07c8f093-5cef-4491-9aa6-d3efb9b06616\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.447765 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc\") pod \"07c8f093-5cef-4491-9aa6-d3efb9b06616\" (UID: \"07c8f093-5cef-4491-9aa6-d3efb9b06616\") " Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.450768 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities" (OuterVolumeSpecName: "utilities") pod "07c8f093-5cef-4491-9aa6-d3efb9b06616" (UID: "07c8f093-5cef-4491-9aa6-d3efb9b06616"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.453958 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc" (OuterVolumeSpecName: "kube-api-access-7pclc") pod "07c8f093-5cef-4491-9aa6-d3efb9b06616" (UID: "07c8f093-5cef-4491-9aa6-d3efb9b06616"). InnerVolumeSpecName "kube-api-access-7pclc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.550522 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.550603 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pclc\" (UniqueName: \"kubernetes.io/projected/07c8f093-5cef-4491-9aa6-d3efb9b06616-kube-api-access-7pclc\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.596533 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07c8f093-5cef-4491-9aa6-d3efb9b06616" (UID: "07c8f093-5cef-4491-9aa6-d3efb9b06616"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.609001 4768 generic.go:334] "Generic (PLEG): container finished" podID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerID="061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82" exitCode=0 Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.609043 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerDied","Data":"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82"} Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.609074 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-928kt" event={"ID":"07c8f093-5cef-4491-9aa6-d3efb9b06616","Type":"ContainerDied","Data":"b9de211614ad8773a06624b61e51fe9676e54e809a5310458a47f88fd49fbfc7"} Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.609093 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-928kt" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.609096 4768 scope.go:117] "RemoveContainer" containerID="061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.630836 4768 scope.go:117] "RemoveContainer" containerID="908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.653332 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c8f093-5cef-4491-9aa6-d3efb9b06616-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.655254 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-928kt"] Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.663260 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-928kt"] Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.685264 4768 scope.go:117] "RemoveContainer" containerID="16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.729276 4768 scope.go:117] "RemoveContainer" containerID="061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82" Dec 03 16:54:02 crc kubenswrapper[4768]: E1203 16:54:02.729898 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82\": container with ID starting with 061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82 not found: ID does not exist" containerID="061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.729946 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82"} err="failed to get container status \"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82\": rpc error: code = NotFound desc = could not find container \"061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82\": container with ID starting with 061a6531a7202c44af77208c9fb9ffacc4955037ab16c03516c8bb6c17fb2b82 not found: ID does not exist" Dec 03 16:54:02 crc 
kubenswrapper[4768]: I1203 16:54:02.729975 4768 scope.go:117] "RemoveContainer" containerID="908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df" Dec 03 16:54:02 crc kubenswrapper[4768]: E1203 16:54:02.730240 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df\": container with ID starting with 908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df not found: ID does not exist" containerID="908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.730272 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df"} err="failed to get container status \"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df\": rpc error: code = NotFound desc = could not find container \"908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df\": container with ID starting with 908c39d180f6372f7b306f36fb5b68c3df0479de74a52e66cb933c12e5b126df not found: ID does not exist" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.730300 4768 scope.go:117] "RemoveContainer" containerID="16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25" Dec 03 16:54:02 crc kubenswrapper[4768]: E1203 16:54:02.730579 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25\": container with ID starting with 16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25 not found: ID does not exist" containerID="16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25" Dec 03 16:54:02 crc kubenswrapper[4768]: I1203 16:54:02.730650 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25"} err="failed to get container status \"16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25\": rpc error: code = NotFound desc = could not find container \"16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25\": container with ID starting with 16fcbcc4199c1dfa1c40e456c0c847872acb6c05ec21a40187ba81a139f3ad25 not found: ID does not exist" Dec 03 16:54:03 crc kubenswrapper[4768]: I1203 16:54:03.552123 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" path="/var/lib/kubelet/pods/07c8f093-5cef-4491-9aa6-d3efb9b06616/volumes" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.515975 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:07 crc kubenswrapper[4768]: E1203 16:54:07.532371 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="extract-utilities" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.532413 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="extract-utilities" Dec 03 16:54:07 crc kubenswrapper[4768]: E1203 16:54:07.532440 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="extract-content" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.532449 4768 
state_mem.go:107] "Deleted CPUSet assignment" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="extract-content" Dec 03 16:54:07 crc kubenswrapper[4768]: E1203 16:54:07.532482 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="registry-server" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.532490 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="registry-server" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.533058 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="07c8f093-5cef-4491-9aa6-d3efb9b06616" containerName="registry-server" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.535452 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.567625 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.653468 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsf7h\" (UniqueName: \"kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.653655 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.653682 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.756079 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.756138 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.756287 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsf7h\" (UniqueName: \"kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc 
kubenswrapper[4768]: I1203 16:54:07.756531 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.756989 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.779013 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsf7h\" (UniqueName: \"kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h\") pod \"community-operators-fd4h7\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:07 crc kubenswrapper[4768]: I1203 16:54:07.870024 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:08 crc kubenswrapper[4768]: I1203 16:54:08.450207 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:08 crc kubenswrapper[4768]: I1203 16:54:08.671402 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerStarted","Data":"c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a"} Dec 03 16:54:09 crc kubenswrapper[4768]: I1203 16:54:09.684353 4768 generic.go:334] "Generic (PLEG): container finished" podID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerID="1281e5b808e9878056acaefe75dca26ac010808006f0cf788c75e9a6ebbeca36" exitCode=0 Dec 03 16:54:09 crc kubenswrapper[4768]: I1203 16:54:09.684426 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerDied","Data":"1281e5b808e9878056acaefe75dca26ac010808006f0cf788c75e9a6ebbeca36"} Dec 03 16:54:10 crc kubenswrapper[4768]: I1203 16:54:10.701076 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerStarted","Data":"5412f52b5416c6e4bc4e4fc401fe1e9983ebbe1f4063125204d0e05414100f6e"} Dec 03 16:54:11 crc kubenswrapper[4768]: I1203 16:54:11.060297 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-bcfpr"] Dec 03 16:54:11 crc kubenswrapper[4768]: I1203 16:54:11.079031 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-bcfpr"] Dec 03 16:54:11 crc kubenswrapper[4768]: I1203 16:54:11.544646 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89dc866f-2349-4947-8f06-ca7046e66709" path="/var/lib/kubelet/pods/89dc866f-2349-4947-8f06-ca7046e66709/volumes" Dec 03 16:54:11 crc kubenswrapper[4768]: I1203 16:54:11.713512 4768 generic.go:334] "Generic (PLEG): container finished" podID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" 
containerID="5412f52b5416c6e4bc4e4fc401fe1e9983ebbe1f4063125204d0e05414100f6e" exitCode=0 Dec 03 16:54:11 crc kubenswrapper[4768]: I1203 16:54:11.713588 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerDied","Data":"5412f52b5416c6e4bc4e4fc401fe1e9983ebbe1f4063125204d0e05414100f6e"} Dec 03 16:54:12 crc kubenswrapper[4768]: I1203 16:54:12.724216 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerStarted","Data":"84360ffbccab68ebf215be07d8f6c84b3a54e365d78a977c362441818e19047e"} Dec 03 16:54:12 crc kubenswrapper[4768]: I1203 16:54:12.746672 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fd4h7" podStartSLOduration=3.33169431 podStartE2EDuration="5.746657364s" podCreationTimestamp="2025-12-03 16:54:07 +0000 UTC" firstStartedPulling="2025-12-03 16:54:09.686658089 +0000 UTC m=+2146.605994522" lastFinishedPulling="2025-12-03 16:54:12.101621153 +0000 UTC m=+2149.020957576" observedRunningTime="2025-12-03 16:54:12.739150974 +0000 UTC m=+2149.658487397" watchObservedRunningTime="2025-12-03 16:54:12.746657364 +0000 UTC m=+2149.665993787" Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.032965 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zrc69"] Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.045129 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-zrc69"] Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.543367 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d6daa39-bd99-4e98-a817-c18efd139e3c" path="/var/lib/kubelet/pods/1d6daa39-bd99-4e98-a817-c18efd139e3c/volumes" Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.870719 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.871049 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:17 crc kubenswrapper[4768]: I1203 16:54:17.922389 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:18 crc kubenswrapper[4768]: I1203 16:54:18.876330 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:18 crc kubenswrapper[4768]: I1203 16:54:18.944747 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:20 crc kubenswrapper[4768]: I1203 16:54:20.806488 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fd4h7" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="registry-server" containerID="cri-o://84360ffbccab68ebf215be07d8f6c84b3a54e365d78a977c362441818e19047e" gracePeriod=2 Dec 03 16:54:21 crc kubenswrapper[4768]: I1203 16:54:21.820375 4768 generic.go:334] "Generic (PLEG): container finished" podID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerID="84360ffbccab68ebf215be07d8f6c84b3a54e365d78a977c362441818e19047e" exitCode=0 Dec 03 16:54:21 crc 
kubenswrapper[4768]: I1203 16:54:21.820411 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerDied","Data":"84360ffbccab68ebf215be07d8f6c84b3a54e365d78a977c362441818e19047e"} Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.169749 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.305858 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsf7h\" (UniqueName: \"kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h\") pod \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.305966 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities\") pod \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.306082 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content\") pod \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\" (UID: \"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6\") " Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.307844 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities" (OuterVolumeSpecName: "utilities") pod "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" (UID: "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.312267 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h" (OuterVolumeSpecName: "kube-api-access-rsf7h") pod "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" (UID: "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6"). InnerVolumeSpecName "kube-api-access-rsf7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.382361 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" (UID: "dc9f7c86-2ec4-4377-aebc-882c84f1e7a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.410180 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.410250 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.410278 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsf7h\" (UniqueName: \"kubernetes.io/projected/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6-kube-api-access-rsf7h\") on node \"crc\" DevicePath \"\"" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.872451 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fd4h7" event={"ID":"dc9f7c86-2ec4-4377-aebc-882c84f1e7a6","Type":"ContainerDied","Data":"c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a"} Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.872514 4768 scope.go:117] "RemoveContainer" containerID="84360ffbccab68ebf215be07d8f6c84b3a54e365d78a977c362441818e19047e" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.872731 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fd4h7" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.900501 4768 scope.go:117] "RemoveContainer" containerID="5412f52b5416c6e4bc4e4fc401fe1e9983ebbe1f4063125204d0e05414100f6e" Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.903038 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.913045 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fd4h7"] Dec 03 16:54:23 crc kubenswrapper[4768]: I1203 16:54:23.936107 4768 scope.go:117] "RemoveContainer" containerID="1281e5b808e9878056acaefe75dca26ac010808006f0cf788c75e9a6ebbeca36" Dec 03 16:54:25 crc kubenswrapper[4768]: I1203 16:54:25.547140 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" path="/var/lib/kubelet/pods/dc9f7c86-2ec4-4377-aebc-882c84f1e7a6/volumes" Dec 03 16:54:28 crc kubenswrapper[4768]: E1203 16:54:28.010021 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find data in memory cache]" Dec 03 16:54:38 crc kubenswrapper[4768]: E1203 16:54:38.282572 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache]" Dec 03 16:54:47 crc kubenswrapper[4768]: I1203 16:54:47.002788 4768 scope.go:117] "RemoveContainer" containerID="b6d6a5ff21df3ff14e4289bf01a3a2e197261eebd4b2ebacf995c27133347dc4" Dec 03 16:54:47 crc kubenswrapper[4768]: I1203 16:54:47.345677 4768 scope.go:117] "RemoveContainer" containerID="394e144f1f9efa209bd84a5a938002f2ce150e3b1ba69d23cdadbb701d1f28d8" Dec 03 16:54:47 crc kubenswrapper[4768]: I1203 16:54:47.411371 4768 scope.go:117] "RemoveContainer" containerID="0bdc5bff95bafddb5e456364d3946694a10ee078c227fdac0d93aeef641538fb" Dec 03 16:54:48 crc kubenswrapper[4768]: E1203 16:54:48.601350 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache]" Dec 03 16:54:56 crc kubenswrapper[4768]: I1203 16:54:56.028088 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:54:56 crc kubenswrapper[4768]: I1203 16:54:56.028655 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:54:58 crc kubenswrapper[4768]: I1203 16:54:58.056496 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-fcg7d"] Dec 03 16:54:58 crc kubenswrapper[4768]: I1203 16:54:58.065193 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-fcg7d"] Dec 03 16:54:58 crc kubenswrapper[4768]: E1203 16:54:58.912268 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find data in memory cache]" Dec 03 16:54:59 crc kubenswrapper[4768]: I1203 16:54:59.545900 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26b7ee95-c386-4920-9c4d-9d74ae5655c4" path="/var/lib/kubelet/pods/26b7ee95-c386-4920-9c4d-9d74ae5655c4/volumes" Dec 03 16:55:09 crc kubenswrapper[4768]: E1203 16:55:09.229585 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find 
data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache]" Dec 03 16:55:15 crc kubenswrapper[4768]: I1203 16:55:15.448305 4768 generic.go:334] "Generic (PLEG): container finished" podID="91a0e247-aab8-40b9-83e3-687d7f6a5927" containerID="378ce88162936bcd235b6bc5dc8619ed343cd3de216f3029b040240549a97712" exitCode=0 Dec 03 16:55:15 crc kubenswrapper[4768]: I1203 16:55:15.448407 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" event={"ID":"91a0e247-aab8-40b9-83e3-687d7f6a5927","Type":"ContainerDied","Data":"378ce88162936bcd235b6bc5dc8619ed343cd3de216f3029b040240549a97712"} Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.651889 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.790882 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key\") pod \"91a0e247-aab8-40b9-83e3-687d7f6a5927\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.791047 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhbgw\" (UniqueName: \"kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw\") pod \"91a0e247-aab8-40b9-83e3-687d7f6a5927\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.791128 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory\") pod \"91a0e247-aab8-40b9-83e3-687d7f6a5927\" (UID: \"91a0e247-aab8-40b9-83e3-687d7f6a5927\") " Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.797632 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw" (OuterVolumeSpecName: "kube-api-access-zhbgw") pod "91a0e247-aab8-40b9-83e3-687d7f6a5927" (UID: "91a0e247-aab8-40b9-83e3-687d7f6a5927"). InnerVolumeSpecName "kube-api-access-zhbgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.839999 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "91a0e247-aab8-40b9-83e3-687d7f6a5927" (UID: "91a0e247-aab8-40b9-83e3-687d7f6a5927"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.841021 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory" (OuterVolumeSpecName: "inventory") pod "91a0e247-aab8-40b9-83e3-687d7f6a5927" (UID: "91a0e247-aab8-40b9-83e3-687d7f6a5927"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.893544 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhbgw\" (UniqueName: \"kubernetes.io/projected/91a0e247-aab8-40b9-83e3-687d7f6a5927-kube-api-access-zhbgw\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.893587 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:17 crc kubenswrapper[4768]: I1203 16:55:17.893650 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/91a0e247-aab8-40b9-83e3-687d7f6a5927-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.477533 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" event={"ID":"91a0e247-aab8-40b9-83e3-687d7f6a5927","Type":"ContainerDied","Data":"db9c51e2572f3e734c421baae9247017c80a4760dfb830ff13212b24823dabf0"} Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.477578 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db9c51e2572f3e734c421baae9247017c80a4760dfb830ff13212b24823dabf0" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.477622 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.751393 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf"] Dec 03 16:55:18 crc kubenswrapper[4768]: E1203 16:55:18.751812 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="registry-server" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.751823 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="registry-server" Dec 03 16:55:18 crc kubenswrapper[4768]: E1203 16:55:18.751858 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="extract-content" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.751864 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="extract-content" Dec 03 16:55:18 crc kubenswrapper[4768]: E1203 16:55:18.751885 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91a0e247-aab8-40b9-83e3-687d7f6a5927" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.751891 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="91a0e247-aab8-40b9-83e3-687d7f6a5927" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 16:55:18 crc kubenswrapper[4768]: E1203 16:55:18.751900 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="extract-utilities" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.751906 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="extract-utilities" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.752081 4768 
memory_manager.go:354] "RemoveStaleState removing state" podUID="dc9f7c86-2ec4-4377-aebc-882c84f1e7a6" containerName="registry-server" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.752099 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="91a0e247-aab8-40b9-83e3-687d7f6a5927" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.752799 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.756188 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.757441 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.757493 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.758702 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.769059 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf"] Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.915332 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vjkm\" (UniqueName: \"kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.915494 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:18 crc kubenswrapper[4768]: I1203 16:55:18.915535 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.017800 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vjkm\" (UniqueName: \"kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.018048 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.018131 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.023925 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.026776 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.052531 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vjkm\" (UniqueName: \"kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.119397 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:55:19 crc kubenswrapper[4768]: E1203 16:55:19.510377 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddc9f7c86_2ec4_4377_aebc_882c84f1e7a6.slice/crio-c4d1390b16ffd7eaf92d2e1cd37b76aa390aabfacb553eafbd506268f459456a\": RecentStats: unable to find data in memory cache]" Dec 03 16:55:19 crc kubenswrapper[4768]: I1203 16:55:19.705567 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf"] Dec 03 16:55:19 crc kubenswrapper[4768]: W1203 16:55:19.712723 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8258f70b_4e7b_40d0_af22_a50690f99fa0.slice/crio-b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4 WatchSource:0}: Error finding container b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4: Status 404 returned error can't find the container with id b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4 Dec 03 16:55:20 crc kubenswrapper[4768]: I1203 16:55:20.504466 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" event={"ID":"8258f70b-4e7b-40d0-af22-a50690f99fa0","Type":"ContainerStarted","Data":"b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4"} Dec 03 16:55:20 crc kubenswrapper[4768]: I1203 16:55:20.546966 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" podStartSLOduration=2.032108553 podStartE2EDuration="2.546938186s" podCreationTimestamp="2025-12-03 16:55:18 +0000 UTC" firstStartedPulling="2025-12-03 16:55:19.716399895 +0000 UTC m=+2216.635736318" lastFinishedPulling="2025-12-03 16:55:20.231229528 +0000 UTC m=+2217.150565951" observedRunningTime="2025-12-03 16:55:20.532259955 +0000 UTC m=+2217.451596458" watchObservedRunningTime="2025-12-03 16:55:20.546938186 +0000 UTC m=+2217.466274649" Dec 03 16:55:21 crc kubenswrapper[4768]: I1203 16:55:21.518773 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" event={"ID":"8258f70b-4e7b-40d0-af22-a50690f99fa0","Type":"ContainerStarted","Data":"468a6aa9f2aee82af4667cfc845d416f756069348fbc0c6135c6dc570a66fef0"} Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.632714 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.637970 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.678738 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.832972 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.833139 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzl5n\" (UniqueName: \"kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.833298 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.935226 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.935354 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzl5n\" (UniqueName: \"kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.935421 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.936172 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.936331 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.960887 4768 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bzl5n\" (UniqueName: \"kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n\") pod \"certified-operators-vssgs\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:22 crc kubenswrapper[4768]: I1203 16:55:22.993405 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:23 crc kubenswrapper[4768]: I1203 16:55:23.515015 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:23 crc kubenswrapper[4768]: I1203 16:55:23.561176 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerStarted","Data":"73c86be595fe89686e74dd39fcd023e4b10991cc6b6fdc8384d97a4d97d172cf"} Dec 03 16:55:24 crc kubenswrapper[4768]: I1203 16:55:24.555395 4768 generic.go:334] "Generic (PLEG): container finished" podID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerID="26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854" exitCode=0 Dec 03 16:55:24 crc kubenswrapper[4768]: I1203 16:55:24.555471 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerDied","Data":"26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854"} Dec 03 16:55:25 crc kubenswrapper[4768]: I1203 16:55:25.573623 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerStarted","Data":"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953"} Dec 03 16:55:26 crc kubenswrapper[4768]: I1203 16:55:26.028101 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:55:26 crc kubenswrapper[4768]: I1203 16:55:26.028171 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:55:26 crc kubenswrapper[4768]: I1203 16:55:26.587770 4768 generic.go:334] "Generic (PLEG): container finished" podID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerID="df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953" exitCode=0 Dec 03 16:55:26 crc kubenswrapper[4768]: I1203 16:55:26.587890 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerDied","Data":"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953"} Dec 03 16:55:28 crc kubenswrapper[4768]: I1203 16:55:28.613053 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerStarted","Data":"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4"} Dec 03 
16:55:28 crc kubenswrapper[4768]: I1203 16:55:28.638953 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vssgs" podStartSLOduration=3.665192968 podStartE2EDuration="6.638936337s" podCreationTimestamp="2025-12-03 16:55:22 +0000 UTC" firstStartedPulling="2025-12-03 16:55:24.560958802 +0000 UTC m=+2221.480295245" lastFinishedPulling="2025-12-03 16:55:27.534702191 +0000 UTC m=+2224.454038614" observedRunningTime="2025-12-03 16:55:28.63313229 +0000 UTC m=+2225.552468713" watchObservedRunningTime="2025-12-03 16:55:28.638936337 +0000 UTC m=+2225.558272760" Dec 03 16:55:32 crc kubenswrapper[4768]: I1203 16:55:32.994263 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:32 crc kubenswrapper[4768]: I1203 16:55:32.994935 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:33 crc kubenswrapper[4768]: I1203 16:55:33.050910 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:33 crc kubenswrapper[4768]: I1203 16:55:33.715608 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.297071 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.513252 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.517878 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.539534 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.650064 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.651273 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.651435 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpqr6\" (UniqueName: \"kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.753839 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.753936 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpqr6\" (UniqueName: \"kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.753984 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.754337 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.754897 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.793221 4768 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-gpqr6\" (UniqueName: \"kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6\") pod \"redhat-marketplace-grnbz\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:34 crc kubenswrapper[4768]: I1203 16:55:34.840566 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:35 crc kubenswrapper[4768]: I1203 16:55:35.355675 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:35 crc kubenswrapper[4768]: I1203 16:55:35.689842 4768 generic.go:334] "Generic (PLEG): container finished" podID="861bad34-b78e-4550-a4e8-6266ddb00732" containerID="b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e" exitCode=0 Dec 03 16:55:35 crc kubenswrapper[4768]: I1203 16:55:35.690153 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vssgs" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="registry-server" containerID="cri-o://df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4" gracePeriod=2 Dec 03 16:55:35 crc kubenswrapper[4768]: I1203 16:55:35.690786 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerDied","Data":"b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e"} Dec 03 16:55:35 crc kubenswrapper[4768]: I1203 16:55:35.690844 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerStarted","Data":"65c304882c8d18a78f214767d95563ce3c847a10f60d163688a534deba069b8e"} Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.225168 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.392702 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzl5n\" (UniqueName: \"kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n\") pod \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.393210 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content\") pod \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.393299 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities\") pod \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\" (UID: \"6f242a59-2fa2-496d-8be1-c036dbbe00dd\") " Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.396270 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities" (OuterVolumeSpecName: "utilities") pod "6f242a59-2fa2-496d-8be1-c036dbbe00dd" (UID: "6f242a59-2fa2-496d-8be1-c036dbbe00dd"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.402380 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n" (OuterVolumeSpecName: "kube-api-access-bzl5n") pod "6f242a59-2fa2-496d-8be1-c036dbbe00dd" (UID: "6f242a59-2fa2-496d-8be1-c036dbbe00dd"). InnerVolumeSpecName "kube-api-access-bzl5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.473041 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f242a59-2fa2-496d-8be1-c036dbbe00dd" (UID: "6f242a59-2fa2-496d-8be1-c036dbbe00dd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.495523 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.495562 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzl5n\" (UniqueName: \"kubernetes.io/projected/6f242a59-2fa2-496d-8be1-c036dbbe00dd-kube-api-access-bzl5n\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.495573 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f242a59-2fa2-496d-8be1-c036dbbe00dd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.709451 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerStarted","Data":"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3"} Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.713557 4768 generic.go:334] "Generic (PLEG): container finished" podID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerID="df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4" exitCode=0 Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.713622 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerDied","Data":"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4"} Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.713649 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vssgs" event={"ID":"6f242a59-2fa2-496d-8be1-c036dbbe00dd","Type":"ContainerDied","Data":"73c86be595fe89686e74dd39fcd023e4b10991cc6b6fdc8384d97a4d97d172cf"} Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.713667 4768 scope.go:117] "RemoveContainer" containerID="df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.713793 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vssgs" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.766339 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.771684 4768 scope.go:117] "RemoveContainer" containerID="df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.775421 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vssgs"] Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.805949 4768 scope.go:117] "RemoveContainer" containerID="26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.882572 4768 scope.go:117] "RemoveContainer" containerID="df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4" Dec 03 16:55:36 crc kubenswrapper[4768]: E1203 16:55:36.883571 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4\": container with ID starting with df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4 not found: ID does not exist" containerID="df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.883633 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4"} err="failed to get container status \"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4\": rpc error: code = NotFound desc = could not find container \"df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4\": container with ID starting with df3adc97e8e75e1c0b358b8955f54b63b9de40d47548bd8069d6b2e32c942aa4 not found: ID does not exist" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.883661 4768 scope.go:117] "RemoveContainer" containerID="df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953" Dec 03 16:55:36 crc kubenswrapper[4768]: E1203 16:55:36.884125 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953\": container with ID starting with df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953 not found: ID does not exist" containerID="df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.884216 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953"} err="failed to get container status \"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953\": rpc error: code = NotFound desc = could not find container \"df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953\": container with ID starting with df7e3f4e864da9b391ab43115fc865d3361f173974ed201feeaaa45868550953 not found: ID does not exist" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.884266 4768 scope.go:117] "RemoveContainer" containerID="26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854" Dec 03 16:55:36 crc kubenswrapper[4768]: E1203 16:55:36.884546 4768 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854\": container with ID starting with 26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854 not found: ID does not exist" containerID="26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854" Dec 03 16:55:36 crc kubenswrapper[4768]: I1203 16:55:36.884575 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854"} err="failed to get container status \"26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854\": rpc error: code = NotFound desc = could not find container \"26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854\": container with ID starting with 26a61c2556150f0e14d2ea2c61dcf6e8d512935d065a1efeec4df904f9b29854 not found: ID does not exist" Dec 03 16:55:37 crc kubenswrapper[4768]: I1203 16:55:37.551971 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" path="/var/lib/kubelet/pods/6f242a59-2fa2-496d-8be1-c036dbbe00dd/volumes" Dec 03 16:55:37 crc kubenswrapper[4768]: I1203 16:55:37.737932 4768 generic.go:334] "Generic (PLEG): container finished" podID="861bad34-b78e-4550-a4e8-6266ddb00732" containerID="2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3" exitCode=0 Dec 03 16:55:37 crc kubenswrapper[4768]: I1203 16:55:37.738070 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerDied","Data":"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3"} Dec 03 16:55:38 crc kubenswrapper[4768]: I1203 16:55:38.778817 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerStarted","Data":"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28"} Dec 03 16:55:38 crc kubenswrapper[4768]: I1203 16:55:38.804919 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-grnbz" podStartSLOduration=2.310457124 podStartE2EDuration="4.80489797s" podCreationTimestamp="2025-12-03 16:55:34 +0000 UTC" firstStartedPulling="2025-12-03 16:55:35.693256707 +0000 UTC m=+2232.612593140" lastFinishedPulling="2025-12-03 16:55:38.187697523 +0000 UTC m=+2235.107033986" observedRunningTime="2025-12-03 16:55:38.803147306 +0000 UTC m=+2235.722483769" watchObservedRunningTime="2025-12-03 16:55:38.80489797 +0000 UTC m=+2235.724234393" Dec 03 16:55:44 crc kubenswrapper[4768]: I1203 16:55:44.840913 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:44 crc kubenswrapper[4768]: I1203 16:55:44.841684 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:44 crc kubenswrapper[4768]: I1203 16:55:44.925634 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:45 crc kubenswrapper[4768]: I1203 16:55:45.923699 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:45 crc kubenswrapper[4768]: I1203 16:55:45.990363 4768 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:47 crc kubenswrapper[4768]: I1203 16:55:47.642900 4768 scope.go:117] "RemoveContainer" containerID="f894cbdb416cb160d9cd0622470ce2e8df59206fb0b6113c90ec4ea22581cc3c" Dec 03 16:55:47 crc kubenswrapper[4768]: I1203 16:55:47.885140 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-grnbz" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="registry-server" containerID="cri-o://7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28" gracePeriod=2 Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.429891 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.570205 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content\") pod \"861bad34-b78e-4550-a4e8-6266ddb00732\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.570462 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpqr6\" (UniqueName: \"kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6\") pod \"861bad34-b78e-4550-a4e8-6266ddb00732\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.570525 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities\") pod \"861bad34-b78e-4550-a4e8-6266ddb00732\" (UID: \"861bad34-b78e-4550-a4e8-6266ddb00732\") " Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.571666 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities" (OuterVolumeSpecName: "utilities") pod "861bad34-b78e-4550-a4e8-6266ddb00732" (UID: "861bad34-b78e-4550-a4e8-6266ddb00732"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.583816 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6" (OuterVolumeSpecName: "kube-api-access-gpqr6") pod "861bad34-b78e-4550-a4e8-6266ddb00732" (UID: "861bad34-b78e-4550-a4e8-6266ddb00732"). InnerVolumeSpecName "kube-api-access-gpqr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.589994 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "861bad34-b78e-4550-a4e8-6266ddb00732" (UID: "861bad34-b78e-4550-a4e8-6266ddb00732"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.674166 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpqr6\" (UniqueName: \"kubernetes.io/projected/861bad34-b78e-4550-a4e8-6266ddb00732-kube-api-access-gpqr6\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.674238 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.674281 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/861bad34-b78e-4550-a4e8-6266ddb00732-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.902759 4768 generic.go:334] "Generic (PLEG): container finished" podID="861bad34-b78e-4550-a4e8-6266ddb00732" containerID="7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28" exitCode=0 Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.902808 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerDied","Data":"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28"} Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.902851 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-grnbz" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.903238 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-grnbz" event={"ID":"861bad34-b78e-4550-a4e8-6266ddb00732","Type":"ContainerDied","Data":"65c304882c8d18a78f214767d95563ce3c847a10f60d163688a534deba069b8e"} Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.903364 4768 scope.go:117] "RemoveContainer" containerID="7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.960037 4768 scope.go:117] "RemoveContainer" containerID="2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3" Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.969448 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.986203 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-grnbz"] Dec 03 16:55:48 crc kubenswrapper[4768]: I1203 16:55:48.989964 4768 scope.go:117] "RemoveContainer" containerID="b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.061848 4768 scope.go:117] "RemoveContainer" containerID="7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28" Dec 03 16:55:49 crc kubenswrapper[4768]: E1203 16:55:49.062532 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28\": container with ID starting with 7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28 not found: ID does not exist" containerID="7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.062579 4768 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28"} err="failed to get container status \"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28\": rpc error: code = NotFound desc = could not find container \"7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28\": container with ID starting with 7f37671af3df7c366a0713401d1b1a578bec9493413ff8e9712048b7a3d27f28 not found: ID does not exist" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.062629 4768 scope.go:117] "RemoveContainer" containerID="2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3" Dec 03 16:55:49 crc kubenswrapper[4768]: E1203 16:55:49.063082 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3\": container with ID starting with 2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3 not found: ID does not exist" containerID="2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.063101 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3"} err="failed to get container status \"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3\": rpc error: code = NotFound desc = could not find container \"2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3\": container with ID starting with 2769247dc39206060b8016c608ea06d1e708e8ede5028954766d3fc6e7b36cf3 not found: ID does not exist" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.063115 4768 scope.go:117] "RemoveContainer" containerID="b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e" Dec 03 16:55:49 crc kubenswrapper[4768]: E1203 16:55:49.063451 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e\": container with ID starting with b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e not found: ID does not exist" containerID="b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.063478 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e"} err="failed to get container status \"b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e\": rpc error: code = NotFound desc = could not find container \"b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e\": container with ID starting with b7b39d92dec0f008602575ec21e311d1d0ca1a886cefd34fb82d55fecd2d9e3e not found: ID does not exist" Dec 03 16:55:49 crc kubenswrapper[4768]: I1203 16:55:49.548348 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" path="/var/lib/kubelet/pods/861bad34-b78e-4550-a4e8-6266ddb00732/volumes" Dec 03 16:55:56 crc kubenswrapper[4768]: I1203 16:55:56.028119 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 16:55:56 crc kubenswrapper[4768]: I1203 16:55:56.028968 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 16:55:56 crc kubenswrapper[4768]: I1203 16:55:56.029065 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 16:55:56 crc kubenswrapper[4768]: I1203 16:55:56.030399 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 16:55:56 crc kubenswrapper[4768]: I1203 16:55:56.030584 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" gracePeriod=600 Dec 03 16:55:56 crc kubenswrapper[4768]: E1203 16:55:56.171831 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:55:57 crc kubenswrapper[4768]: I1203 16:55:57.002962 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" exitCode=0 Dec 03 16:55:57 crc kubenswrapper[4768]: I1203 16:55:57.003054 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"} Dec 03 16:55:57 crc kubenswrapper[4768]: I1203 16:55:57.003358 4768 scope.go:117] "RemoveContainer" containerID="b50653ba28a8284980c8fa897a37cb159ef35d14a7297dd3ec0ba365db9040d8" Dec 03 16:55:57 crc kubenswrapper[4768]: I1203 16:55:57.007055 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:55:57 crc kubenswrapper[4768]: E1203 16:55:57.008415 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:08 crc kubenswrapper[4768]: I1203 16:56:08.532663 4768 scope.go:117] "RemoveContainer" 
containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:56:08 crc kubenswrapper[4768]: E1203 16:56:08.534764 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:19 crc kubenswrapper[4768]: I1203 16:56:19.531582 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:56:19 crc kubenswrapper[4768]: E1203 16:56:19.532454 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:30 crc kubenswrapper[4768]: I1203 16:56:30.531997 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:56:30 crc kubenswrapper[4768]: E1203 16:56:30.532682 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:37 crc kubenswrapper[4768]: I1203 16:56:37.066073 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-jpj28"] Dec 03 16:56:37 crc kubenswrapper[4768]: I1203 16:56:37.079313 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-jpj28"] Dec 03 16:56:37 crc kubenswrapper[4768]: I1203 16:56:37.546309 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36dbacfe-876b-4926-8214-06db2bf33002" path="/var/lib/kubelet/pods/36dbacfe-876b-4926-8214-06db2bf33002/volumes" Dec 03 16:56:41 crc kubenswrapper[4768]: I1203 16:56:41.494797 4768 generic.go:334] "Generic (PLEG): container finished" podID="8258f70b-4e7b-40d0-af22-a50690f99fa0" containerID="468a6aa9f2aee82af4667cfc845d416f756069348fbc0c6135c6dc570a66fef0" exitCode=0 Dec 03 16:56:41 crc kubenswrapper[4768]: I1203 16:56:41.494924 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" event={"ID":"8258f70b-4e7b-40d0-af22-a50690f99fa0","Type":"ContainerDied","Data":"468a6aa9f2aee82af4667cfc845d416f756069348fbc0c6135c6dc570a66fef0"} Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.035820 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-c6rx6"] Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.046955 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-c6rx6"] Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.116314 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.172557 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key\") pod \"8258f70b-4e7b-40d0-af22-a50690f99fa0\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.172945 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vjkm\" (UniqueName: \"kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm\") pod \"8258f70b-4e7b-40d0-af22-a50690f99fa0\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.173004 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory\") pod \"8258f70b-4e7b-40d0-af22-a50690f99fa0\" (UID: \"8258f70b-4e7b-40d0-af22-a50690f99fa0\") " Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.182747 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm" (OuterVolumeSpecName: "kube-api-access-7vjkm") pod "8258f70b-4e7b-40d0-af22-a50690f99fa0" (UID: "8258f70b-4e7b-40d0-af22-a50690f99fa0"). InnerVolumeSpecName "kube-api-access-7vjkm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.201019 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8258f70b-4e7b-40d0-af22-a50690f99fa0" (UID: "8258f70b-4e7b-40d0-af22-a50690f99fa0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.206125 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory" (OuterVolumeSpecName: "inventory") pod "8258f70b-4e7b-40d0-af22-a50690f99fa0" (UID: "8258f70b-4e7b-40d0-af22-a50690f99fa0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.275677 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vjkm\" (UniqueName: \"kubernetes.io/projected/8258f70b-4e7b-40d0-af22-a50690f99fa0-kube-api-access-7vjkm\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.275722 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.275738 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8258f70b-4e7b-40d0-af22-a50690f99fa0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.517159 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" event={"ID":"8258f70b-4e7b-40d0-af22-a50690f99fa0","Type":"ContainerDied","Data":"b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4"} Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.517201 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b30ad0c6cf90d2310c62e2ed149b376f0b81331c391549362afc238519d921c4" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.517296 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.544787 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e852a88e-fb99-477e-9b55-fa57a654c1f2" path="/var/lib/kubelet/pods/e852a88e-fb99-477e-9b55-fa57a654c1f2/volumes" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694270 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr"] Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694797 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="extract-utilities" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694822 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="extract-utilities" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694847 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="extract-content" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694858 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="extract-content" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694881 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="extract-utilities" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694891 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="extract-utilities" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694903 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="extract-content" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694909 4768 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="extract-content" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694927 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694933 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694944 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694950 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: E1203 16:56:43.694964 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8258f70b-4e7b-40d0-af22-a50690f99fa0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.694970 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8258f70b-4e7b-40d0-af22-a50690f99fa0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.695151 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f242a59-2fa2-496d-8be1-c036dbbe00dd" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.695176 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8258f70b-4e7b-40d0-af22-a50690f99fa0" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.695186 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="861bad34-b78e-4550-a4e8-6266ddb00732" containerName="registry-server" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.695973 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.698817 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.699559 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.700017 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.709293 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.709655 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr"] Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.786990 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph99g\" (UniqueName: \"kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.787071 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.787255 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.889520 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph99g\" (UniqueName: \"kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.889657 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.889768 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.894203 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.897948 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:43 crc kubenswrapper[4768]: I1203 16:56:43.913715 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph99g\" (UniqueName: \"kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:44 crc kubenswrapper[4768]: I1203 16:56:44.028433 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:44 crc kubenswrapper[4768]: I1203 16:56:44.531654 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:56:44 crc kubenswrapper[4768]: E1203 16:56:44.532208 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:44 crc kubenswrapper[4768]: I1203 16:56:44.658045 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr"] Dec 03 16:56:45 crc kubenswrapper[4768]: I1203 16:56:45.543549 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" event={"ID":"581e01fb-3c2a-4c39-926d-c25aebdfae5e","Type":"ContainerStarted","Data":"c5cea4a2c24124113d6476fa3f814fbce6302593b499ebc96745575ad8747c1d"} Dec 03 16:56:45 crc kubenswrapper[4768]: I1203 16:56:45.543921 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" event={"ID":"581e01fb-3c2a-4c39-926d-c25aebdfae5e","Type":"ContainerStarted","Data":"91e039fa1c7bc01ebca3c1bda211a41ee40ab58ee4f9cc69ac3e27ebf09cbe32"} Dec 03 16:56:45 crc kubenswrapper[4768]: I1203 16:56:45.564841 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" podStartSLOduration=2.122902355 podStartE2EDuration="2.564817873s" 
podCreationTimestamp="2025-12-03 16:56:43 +0000 UTC" firstStartedPulling="2025-12-03 16:56:44.662985763 +0000 UTC m=+2301.582322186" lastFinishedPulling="2025-12-03 16:56:45.104901281 +0000 UTC m=+2302.024237704" observedRunningTime="2025-12-03 16:56:45.561121996 +0000 UTC m=+2302.480458459" watchObservedRunningTime="2025-12-03 16:56:45.564817873 +0000 UTC m=+2302.484154326" Dec 03 16:56:47 crc kubenswrapper[4768]: I1203 16:56:47.731686 4768 scope.go:117] "RemoveContainer" containerID="6c2db4874b3370a6b85deb15833e5a3c12b03f09a95a58d01b31ac6b3bfaaa7f" Dec 03 16:56:47 crc kubenswrapper[4768]: I1203 16:56:47.767166 4768 scope.go:117] "RemoveContainer" containerID="54312468cb34f855971cf5f33681cfd2853f75b1b247529929dcf96bda996bba" Dec 03 16:56:51 crc kubenswrapper[4768]: I1203 16:56:51.629122 4768 generic.go:334] "Generic (PLEG): container finished" podID="581e01fb-3c2a-4c39-926d-c25aebdfae5e" containerID="c5cea4a2c24124113d6476fa3f814fbce6302593b499ebc96745575ad8747c1d" exitCode=0 Dec 03 16:56:51 crc kubenswrapper[4768]: I1203 16:56:51.629221 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" event={"ID":"581e01fb-3c2a-4c39-926d-c25aebdfae5e","Type":"ContainerDied","Data":"c5cea4a2c24124113d6476fa3f814fbce6302593b499ebc96745575ad8747c1d"} Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.174698 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.215385 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory\") pod \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.215493 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph99g\" (UniqueName: \"kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g\") pod \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.215742 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key\") pod \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\" (UID: \"581e01fb-3c2a-4c39-926d-c25aebdfae5e\") " Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.222112 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g" (OuterVolumeSpecName: "kube-api-access-ph99g") pod "581e01fb-3c2a-4c39-926d-c25aebdfae5e" (UID: "581e01fb-3c2a-4c39-926d-c25aebdfae5e"). InnerVolumeSpecName "kube-api-access-ph99g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.256995 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "581e01fb-3c2a-4c39-926d-c25aebdfae5e" (UID: "581e01fb-3c2a-4c39-926d-c25aebdfae5e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.260440 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory" (OuterVolumeSpecName: "inventory") pod "581e01fb-3c2a-4c39-926d-c25aebdfae5e" (UID: "581e01fb-3c2a-4c39-926d-c25aebdfae5e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.319408 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.319463 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph99g\" (UniqueName: \"kubernetes.io/projected/581e01fb-3c2a-4c39-926d-c25aebdfae5e-kube-api-access-ph99g\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.319481 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/581e01fb-3c2a-4c39-926d-c25aebdfae5e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.650210 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" event={"ID":"581e01fb-3c2a-4c39-926d-c25aebdfae5e","Type":"ContainerDied","Data":"91e039fa1c7bc01ebca3c1bda211a41ee40ab58ee4f9cc69ac3e27ebf09cbe32"} Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.650496 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91e039fa1c7bc01ebca3c1bda211a41ee40ab58ee4f9cc69ac3e27ebf09cbe32" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.650285 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.805499 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz"] Dec 03 16:56:53 crc kubenswrapper[4768]: E1203 16:56:53.806461 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581e01fb-3c2a-4c39-926d-c25aebdfae5e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.806537 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="581e01fb-3c2a-4c39-926d-c25aebdfae5e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.806812 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="581e01fb-3c2a-4c39-926d-c25aebdfae5e" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.807579 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.814529 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.814534 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.814580 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.815817 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.821805 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz"] Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.832238 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.832397 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gllmv\" (UniqueName: \"kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.832531 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.934530 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.934602 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gllmv\" (UniqueName: \"kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.934638 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: 
\"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.941784 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.951645 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:53 crc kubenswrapper[4768]: I1203 16:56:53.954988 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gllmv\" (UniqueName: \"kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-v5lgz\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:54 crc kubenswrapper[4768]: I1203 16:56:54.132940 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:56:54 crc kubenswrapper[4768]: I1203 16:56:54.766511 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz"] Dec 03 16:56:55 crc kubenswrapper[4768]: I1203 16:56:55.531836 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:56:55 crc kubenswrapper[4768]: E1203 16:56:55.532495 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:56:55 crc kubenswrapper[4768]: I1203 16:56:55.675749 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" event={"ID":"a709e070-9d8d-43ab-8cca-46c4ac80bda3","Type":"ContainerStarted","Data":"d4922518d8d079c30eb242c4e00883c24f5b4fdc175225d8f86988082c240db1"} Dec 03 16:56:55 crc kubenswrapper[4768]: I1203 16:56:55.675794 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" event={"ID":"a709e070-9d8d-43ab-8cca-46c4ac80bda3","Type":"ContainerStarted","Data":"186a85ce5f475d421a682f41f5e9502d3ea06f146243e8581c17d4aec6dfe39b"} Dec 03 16:57:08 crc kubenswrapper[4768]: I1203 16:57:08.531948 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:57:08 crc kubenswrapper[4768]: E1203 16:57:08.532688 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Dec 03 16:57:22 crc kubenswrapper[4768]: I1203 16:57:22.531966 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"
Dec 03 16:57:22 crc kubenswrapper[4768]: E1203 16:57:22.532806 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:57:35 crc kubenswrapper[4768]: I1203 16:57:35.531960 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"
Dec 03 16:57:35 crc kubenswrapper[4768]: E1203 16:57:35.532694 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 16:57:37 crc kubenswrapper[4768]: I1203 16:57:37.109787 4768 generic.go:334] "Generic (PLEG): container finished" podID="a709e070-9d8d-43ab-8cca-46c4ac80bda3" containerID="d4922518d8d079c30eb242c4e00883c24f5b4fdc175225d8f86988082c240db1" exitCode=0
Dec 03 16:57:37 crc kubenswrapper[4768]: I1203 16:57:37.109864 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" event={"ID":"a709e070-9d8d-43ab-8cca-46c4ac80bda3","Type":"ContainerDied","Data":"d4922518d8d079c30eb242c4e00883c24f5b4fdc175225d8f86988082c240db1"}
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.705069 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz"
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.793737 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory\") pod \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") "
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.794070 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key\") pod \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") "
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.794134 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gllmv\" (UniqueName: \"kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv\") pod \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\" (UID: \"a709e070-9d8d-43ab-8cca-46c4ac80bda3\") "
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.808799 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv" (OuterVolumeSpecName: "kube-api-access-gllmv") pod "a709e070-9d8d-43ab-8cca-46c4ac80bda3" (UID: "a709e070-9d8d-43ab-8cca-46c4ac80bda3"). InnerVolumeSpecName "kube-api-access-gllmv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.823803 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a709e070-9d8d-43ab-8cca-46c4ac80bda3" (UID: "a709e070-9d8d-43ab-8cca-46c4ac80bda3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.832327 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory" (OuterVolumeSpecName: "inventory") pod "a709e070-9d8d-43ab-8cca-46c4ac80bda3" (UID: "a709e070-9d8d-43ab-8cca-46c4ac80bda3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.896293 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.896335 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gllmv\" (UniqueName: \"kubernetes.io/projected/a709e070-9d8d-43ab-8cca-46c4ac80bda3-kube-api-access-gllmv\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:38 crc kubenswrapper[4768]: I1203 16:57:38.896354 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a709e070-9d8d-43ab-8cca-46c4ac80bda3-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.132756 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" event={"ID":"a709e070-9d8d-43ab-8cca-46c4ac80bda3","Type":"ContainerDied","Data":"186a85ce5f475d421a682f41f5e9502d3ea06f146243e8581c17d4aec6dfe39b"} Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.132815 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="186a85ce5f475d421a682f41f5e9502d3ea06f146243e8581c17d4aec6dfe39b" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.132842 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-v5lgz" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.247157 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85"] Dec 03 16:57:39 crc kubenswrapper[4768]: E1203 16:57:39.247529 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a709e070-9d8d-43ab-8cca-46c4ac80bda3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.247548 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="a709e070-9d8d-43ab-8cca-46c4ac80bda3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.247763 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="a709e070-9d8d-43ab-8cca-46c4ac80bda3" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.248444 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.252943 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.253161 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.253650 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.257521 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.262457 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85"] Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.405710 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.405912 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjsmp\" (UniqueName: \"kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.405943 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.507276 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.507343 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.507544 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjsmp\" (UniqueName: \"kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" 
(UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.512780 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.519427 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.523710 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjsmp\" (UniqueName: \"kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-7hk85\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:39 crc kubenswrapper[4768]: I1203 16:57:39.608827 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:57:40 crc kubenswrapper[4768]: I1203 16:57:40.131514 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85"] Dec 03 16:57:41 crc kubenswrapper[4768]: I1203 16:57:41.206457 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" event={"ID":"8660b1b7-7972-4b35-a50e-010de4788792","Type":"ContainerStarted","Data":"8bf5a435c7dee4e7b9c06b8381e25f626a98bab0c1e42ba0cc9fa5e581a43cc3"} Dec 03 16:57:41 crc kubenswrapper[4768]: I1203 16:57:41.207163 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" event={"ID":"8660b1b7-7972-4b35-a50e-010de4788792","Type":"ContainerStarted","Data":"54186c6d221df18fed07a3a6e97149259e4bf17829ba344dfa48d93aa244f564"} Dec 03 16:57:46 crc kubenswrapper[4768]: I1203 16:57:46.532075 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:57:46 crc kubenswrapper[4768]: E1203 16:57:46.532882 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:57:58 crc kubenswrapper[4768]: I1203 16:57:58.532356 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:57:58 crc kubenswrapper[4768]: E1203 16:57:58.533335 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:58:13 crc kubenswrapper[4768]: I1203 16:58:13.546720 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:58:13 crc kubenswrapper[4768]: E1203 16:58:13.547580 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:58:26 crc kubenswrapper[4768]: I1203 16:58:26.533075 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:58:26 crc kubenswrapper[4768]: E1203 16:58:26.534626 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:58:39 crc kubenswrapper[4768]: I1203 16:58:39.842806 4768 generic.go:334] "Generic (PLEG): container finished" podID="8660b1b7-7972-4b35-a50e-010de4788792" containerID="8bf5a435c7dee4e7b9c06b8381e25f626a98bab0c1e42ba0cc9fa5e581a43cc3" exitCode=0 Dec 03 16:58:39 crc kubenswrapper[4768]: I1203 16:58:39.842904 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" event={"ID":"8660b1b7-7972-4b35-a50e-010de4788792","Type":"ContainerDied","Data":"8bf5a435c7dee4e7b9c06b8381e25f626a98bab0c1e42ba0cc9fa5e581a43cc3"} Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.341639 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.383278 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key\") pod \"8660b1b7-7972-4b35-a50e-010de4788792\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.383359 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjsmp\" (UniqueName: \"kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp\") pod \"8660b1b7-7972-4b35-a50e-010de4788792\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.383553 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory\") pod \"8660b1b7-7972-4b35-a50e-010de4788792\" (UID: \"8660b1b7-7972-4b35-a50e-010de4788792\") " Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.392714 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp" (OuterVolumeSpecName: "kube-api-access-bjsmp") pod "8660b1b7-7972-4b35-a50e-010de4788792" (UID: "8660b1b7-7972-4b35-a50e-010de4788792"). InnerVolumeSpecName "kube-api-access-bjsmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.419668 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory" (OuterVolumeSpecName: "inventory") pod "8660b1b7-7972-4b35-a50e-010de4788792" (UID: "8660b1b7-7972-4b35-a50e-010de4788792"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.427872 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8660b1b7-7972-4b35-a50e-010de4788792" (UID: "8660b1b7-7972-4b35-a50e-010de4788792"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.486076 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.486109 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjsmp\" (UniqueName: \"kubernetes.io/projected/8660b1b7-7972-4b35-a50e-010de4788792-kube-api-access-bjsmp\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.486121 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8660b1b7-7972-4b35-a50e-010de4788792-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.538134 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:58:41 crc kubenswrapper[4768]: E1203 16:58:41.539924 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.880875 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" event={"ID":"8660b1b7-7972-4b35-a50e-010de4788792","Type":"ContainerDied","Data":"54186c6d221df18fed07a3a6e97149259e4bf17829ba344dfa48d93aa244f564"} Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.881779 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54186c6d221df18fed07a3a6e97149259e4bf17829ba344dfa48d93aa244f564" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.881323 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-7hk85" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.982921 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-f7vnp"] Dec 03 16:58:41 crc kubenswrapper[4768]: E1203 16:58:41.983732 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8660b1b7-7972-4b35-a50e-010de4788792" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.983768 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="8660b1b7-7972-4b35-a50e-010de4788792" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.984201 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="8660b1b7-7972-4b35-a50e-010de4788792" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.985673 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.989134 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.989224 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.990837 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.991934 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.994849 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-f7vnp"] Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.996921 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd8kq\" (UniqueName: \"kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.997020 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:41 crc kubenswrapper[4768]: I1203 16:58:41.997357 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.099110 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.099235 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd8kq\" (UniqueName: \"kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.099290 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc 
kubenswrapper[4768]: I1203 16:58:42.103938 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.105276 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.139931 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd8kq\" (UniqueName: \"kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq\") pod \"ssh-known-hosts-edpm-deployment-f7vnp\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.315675 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.870854 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-f7vnp"] Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.873679 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 16:58:42 crc kubenswrapper[4768]: I1203 16:58:42.891253 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" event={"ID":"e7056232-6bbb-46d2-b15b-79dca6a43cb4","Type":"ContainerStarted","Data":"cbde63844c4cc35bdb0b7982a1e5c2c6a56e9a3ef121669168deb555cd1ba1f9"} Dec 03 16:58:43 crc kubenswrapper[4768]: I1203 16:58:43.900429 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" event={"ID":"e7056232-6bbb-46d2-b15b-79dca6a43cb4","Type":"ContainerStarted","Data":"70a210346593b62e3909ab750d264579e44c824fbeeb415897a971ae0e200666"} Dec 03 16:58:43 crc kubenswrapper[4768]: I1203 16:58:43.927540 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" podStartSLOduration=2.470832788 podStartE2EDuration="2.927521434s" podCreationTimestamp="2025-12-03 16:58:41 +0000 UTC" firstStartedPulling="2025-12-03 16:58:42.873367349 +0000 UTC m=+2419.792703792" lastFinishedPulling="2025-12-03 16:58:43.330056005 +0000 UTC m=+2420.249392438" observedRunningTime="2025-12-03 16:58:43.916202116 +0000 UTC m=+2420.835538529" watchObservedRunningTime="2025-12-03 16:58:43.927521434 +0000 UTC m=+2420.846857857" Dec 03 16:58:51 crc kubenswrapper[4768]: I1203 16:58:51.982666 4768 generic.go:334] "Generic (PLEG): container finished" podID="e7056232-6bbb-46d2-b15b-79dca6a43cb4" containerID="70a210346593b62e3909ab750d264579e44c824fbeeb415897a971ae0e200666" exitCode=0 Dec 03 16:58:51 crc kubenswrapper[4768]: I1203 16:58:51.982770 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" 
event={"ID":"e7056232-6bbb-46d2-b15b-79dca6a43cb4","Type":"ContainerDied","Data":"70a210346593b62e3909ab750d264579e44c824fbeeb415897a971ae0e200666"} Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.430268 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.543630 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam\") pod \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.544091 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0\") pod \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.544147 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd8kq\" (UniqueName: \"kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq\") pod \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\" (UID: \"e7056232-6bbb-46d2-b15b-79dca6a43cb4\") " Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.549764 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq" (OuterVolumeSpecName: "kube-api-access-wd8kq") pod "e7056232-6bbb-46d2-b15b-79dca6a43cb4" (UID: "e7056232-6bbb-46d2-b15b-79dca6a43cb4"). InnerVolumeSpecName "kube-api-access-wd8kq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.574069 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e7056232-6bbb-46d2-b15b-79dca6a43cb4" (UID: "e7056232-6bbb-46d2-b15b-79dca6a43cb4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.577932 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "e7056232-6bbb-46d2-b15b-79dca6a43cb4" (UID: "e7056232-6bbb-46d2-b15b-79dca6a43cb4"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.646409 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.646543 4768 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/e7056232-6bbb-46d2-b15b-79dca6a43cb4-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:53 crc kubenswrapper[4768]: I1203 16:58:53.646562 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd8kq\" (UniqueName: \"kubernetes.io/projected/e7056232-6bbb-46d2-b15b-79dca6a43cb4-kube-api-access-wd8kq\") on node \"crc\" DevicePath \"\"" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.005675 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" event={"ID":"e7056232-6bbb-46d2-b15b-79dca6a43cb4","Type":"ContainerDied","Data":"cbde63844c4cc35bdb0b7982a1e5c2c6a56e9a3ef121669168deb555cd1ba1f9"} Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.005722 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbde63844c4cc35bdb0b7982a1e5c2c6a56e9a3ef121669168deb555cd1ba1f9" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.005793 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-f7vnp" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.106172 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z"] Dec 03 16:58:54 crc kubenswrapper[4768]: E1203 16:58:54.106652 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7056232-6bbb-46d2-b15b-79dca6a43cb4" containerName="ssh-known-hosts-edpm-deployment" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.106669 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7056232-6bbb-46d2-b15b-79dca6a43cb4" containerName="ssh-known-hosts-edpm-deployment" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.106892 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7056232-6bbb-46d2-b15b-79dca6a43cb4" containerName="ssh-known-hosts-edpm-deployment" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.107747 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.111102 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.111201 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.111318 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.111507 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.126703 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z"] Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.259798 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.259871 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.259913 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkhx6\" (UniqueName: \"kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.362169 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.362260 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.362326 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkhx6\" (UniqueName: \"kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.373475 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.373531 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.383175 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkhx6\" (UniqueName: \"kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8rs7z\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:54 crc kubenswrapper[4768]: I1203 16:58:54.436688 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" Dec 03 16:58:55 crc kubenswrapper[4768]: I1203 16:58:55.007238 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z"] Dec 03 16:58:55 crc kubenswrapper[4768]: I1203 16:58:55.018129 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" event={"ID":"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f","Type":"ContainerStarted","Data":"84c61c1ba914c024454a80f2e73021cba7d33b55ee4677d5a9b9c4bdc8c61a25"} Dec 03 16:58:56 crc kubenswrapper[4768]: I1203 16:58:56.027055 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" event={"ID":"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f","Type":"ContainerStarted","Data":"78e526b3c425358a09f791363fa759f43335c47e69bf504ecb387fd4e1d0a244"} Dec 03 16:58:56 crc kubenswrapper[4768]: I1203 16:58:56.052070 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" podStartSLOduration=1.552021104 podStartE2EDuration="2.052048381s" podCreationTimestamp="2025-12-03 16:58:54 +0000 UTC" firstStartedPulling="2025-12-03 16:58:55.007126598 +0000 UTC m=+2431.926463021" lastFinishedPulling="2025-12-03 16:58:55.507153875 +0000 UTC m=+2432.426490298" observedRunningTime="2025-12-03 16:58:56.047387948 +0000 UTC m=+2432.966724411" watchObservedRunningTime="2025-12-03 16:58:56.052048381 +0000 UTC m=+2432.971384824" Dec 03 16:58:56 crc kubenswrapper[4768]: I1203 16:58:56.531834 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:58:56 crc kubenswrapper[4768]: E1203 16:58:56.532169 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 03 16:59:04 crc kubenswrapper[4768]: I1203 16:59:04.123179 4768 generic.go:334] "Generic (PLEG): container finished" podID="ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" containerID="78e526b3c425358a09f791363fa759f43335c47e69bf504ecb387fd4e1d0a244" exitCode=0
Dec 03 16:59:04 crc kubenswrapper[4768]: I1203 16:59:04.123373 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" event={"ID":"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f","Type":"ContainerDied","Data":"78e526b3c425358a09f791363fa759f43335c47e69bf504ecb387fd4e1d0a244"}
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.701949 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z"
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.791326 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory\") pod \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") "
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.791400 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkhx6\" (UniqueName: \"kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6\") pod \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") "
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.791448 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key\") pod \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\" (UID: \"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f\") "
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.813161 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6" (OuterVolumeSpecName: "kube-api-access-fkhx6") pod "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" (UID: "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f"). InnerVolumeSpecName "kube-api-access-fkhx6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.823710 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory" (OuterVolumeSpecName: "inventory") pod "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" (UID: "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.932206 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-inventory\") on node \"crc\" DevicePath \"\""
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.932249 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkhx6\" (UniqueName: \"kubernetes.io/projected/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-kube-api-access-fkhx6\") on node \"crc\" DevicePath \"\""
Dec 03 16:59:05 crc kubenswrapper[4768]: I1203 16:59:05.992806 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" (UID: "ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.033939 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.147338 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z" event={"ID":"ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f","Type":"ContainerDied","Data":"84c61c1ba914c024454a80f2e73021cba7d33b55ee4677d5a9b9c4bdc8c61a25"}
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.147785 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84c61c1ba914c024454a80f2e73021cba7d33b55ee4677d5a9b9c4bdc8c61a25"
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.147440 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8rs7z"
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.251290 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq"]
Dec 03 16:59:06 crc kubenswrapper[4768]: E1203 16:59:06.251804 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.251823 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.252079 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.252884 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq"
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.257834 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.258136 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.261462 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.261585 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.263452 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq"] Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.340889 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.340936 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.341009 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxwzw\" (UniqueName: \"kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.442408 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxwzw\" (UniqueName: \"kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.442697 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.442733 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: 
\"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.447236 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.447272 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.458079 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxwzw\" (UniqueName: \"kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:06 crc kubenswrapper[4768]: I1203 16:59:06.582572 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:07 crc kubenswrapper[4768]: I1203 16:59:07.240470 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq"] Dec 03 16:59:08 crc kubenswrapper[4768]: I1203 16:59:08.174347 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" event={"ID":"591536f4-56e5-458a-b0f5-9a4d2effd8ff","Type":"ContainerStarted","Data":"6a4a40c4be7fe312bb5ab71eb3844d66440a2a68c29726f8a668f6b10c71653f"} Dec 03 16:59:08 crc kubenswrapper[4768]: I1203 16:59:08.175106 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" event={"ID":"591536f4-56e5-458a-b0f5-9a4d2effd8ff","Type":"ContainerStarted","Data":"a6c405416f3179fe03615399b21cf5c94bedcab717e2a55a9d5b680ece3b28ce"} Dec 03 16:59:08 crc kubenswrapper[4768]: I1203 16:59:08.203687 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" podStartSLOduration=1.778385449 podStartE2EDuration="2.203580718s" podCreationTimestamp="2025-12-03 16:59:06 +0000 UTC" firstStartedPulling="2025-12-03 16:59:07.238838234 +0000 UTC m=+2444.158174687" lastFinishedPulling="2025-12-03 16:59:07.664033523 +0000 UTC m=+2444.583369956" observedRunningTime="2025-12-03 16:59:08.197290783 +0000 UTC m=+2445.116627246" watchObservedRunningTime="2025-12-03 16:59:08.203580718 +0000 UTC m=+2445.122917171" Dec 03 16:59:10 crc kubenswrapper[4768]: I1203 16:59:10.531881 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:59:10 crc kubenswrapper[4768]: E1203 16:59:10.532392 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 03 16:59:19 crc kubenswrapper[4768]: I1203 16:59:19.283895 4768 generic.go:334] "Generic (PLEG): container finished" podID="591536f4-56e5-458a-b0f5-9a4d2effd8ff" containerID="6a4a40c4be7fe312bb5ab71eb3844d66440a2a68c29726f8a668f6b10c71653f" exitCode=0
Dec 03 16:59:19 crc kubenswrapper[4768]: I1203 16:59:19.283911 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" event={"ID":"591536f4-56e5-458a-b0f5-9a4d2effd8ff","Type":"ContainerDied","Data":"6a4a40c4be7fe312bb5ab71eb3844d66440a2a68c29726f8a668f6b10c71653f"}
Dec 03 16:59:20 crc kubenswrapper[4768]: I1203 16:59:20.797437 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq"
Dec 03 16:59:20 crc kubenswrapper[4768]: I1203 16:59:20.964681 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory\") pod \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") "
Dec 03 16:59:20 crc kubenswrapper[4768]: I1203 16:59:20.964918 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxwzw\" (UniqueName: \"kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw\") pod \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") "
Dec 03 16:59:20 crc kubenswrapper[4768]: I1203 16:59:20.965081 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key\") pod \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\" (UID: \"591536f4-56e5-458a-b0f5-9a4d2effd8ff\") "
Dec 03 16:59:20 crc kubenswrapper[4768]: I1203 16:59:20.970973 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw" (OuterVolumeSpecName: "kube-api-access-zxwzw") pod "591536f4-56e5-458a-b0f5-9a4d2effd8ff" (UID: "591536f4-56e5-458a-b0f5-9a4d2effd8ff"). InnerVolumeSpecName "kube-api-access-zxwzw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.016163 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "591536f4-56e5-458a-b0f5-9a4d2effd8ff" (UID: "591536f4-56e5-458a-b0f5-9a4d2effd8ff"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.019394 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory" (OuterVolumeSpecName: "inventory") pod "591536f4-56e5-458a-b0f5-9a4d2effd8ff" (UID: "591536f4-56e5-458a-b0f5-9a4d2effd8ff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.069132 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxwzw\" (UniqueName: \"kubernetes.io/projected/591536f4-56e5-458a-b0f5-9a4d2effd8ff-kube-api-access-zxwzw\") on node \"crc\" DevicePath \"\"" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.069184 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.069204 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/591536f4-56e5-458a-b0f5-9a4d2effd8ff-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.310138 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" event={"ID":"591536f4-56e5-458a-b0f5-9a4d2effd8ff","Type":"ContainerDied","Data":"a6c405416f3179fe03615399b21cf5c94bedcab717e2a55a9d5b680ece3b28ce"} Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.310187 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6c405416f3179fe03615399b21cf5c94bedcab717e2a55a9d5b680ece3b28ce" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.310269 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.449029 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"] Dec 03 16:59:21 crc kubenswrapper[4768]: E1203 16:59:21.449523 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="591536f4-56e5-458a-b0f5-9a4d2effd8ff" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.449540 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="591536f4-56e5-458a-b0f5-9a4d2effd8ff" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.449813 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="591536f4-56e5-458a-b0f5-9a4d2effd8ff" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.450668 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462135 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462540 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462706 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462794 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462882 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462796 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.462713 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.463089 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.464951 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"] Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.577765 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578040 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578080 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578121 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578299 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578376 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578464 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578519 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578738 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578828 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578920 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.578987 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d266d\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.579054 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.579165 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681632 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681715 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d266d\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681747 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681821 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681857 4768 
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681926 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.681960 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682020 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682087 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682111 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682139 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682168 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"
\"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682238 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.682283 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.688963 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.689794 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.690476 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.698285 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.700163 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.701204 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.701929 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.703080 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.703353 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.703452 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.704543 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.705092 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.705550 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.707370 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d266d\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-lmm92\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:21 crc kubenswrapper[4768]: I1203 16:59:21.780639 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 16:59:22 crc kubenswrapper[4768]: I1203 16:59:22.375892 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92"] Dec 03 16:59:23 crc kubenswrapper[4768]: I1203 16:59:23.335723 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" event={"ID":"f4bca08e-ad57-49ce-8fd2-29262a619a67","Type":"ContainerStarted","Data":"cba92bc08c197bf6eaecda596c32af5fa9709a358d21e4b8e345365379175b37"} Dec 03 16:59:23 crc kubenswrapper[4768]: I1203 16:59:23.336081 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" event={"ID":"f4bca08e-ad57-49ce-8fd2-29262a619a67","Type":"ContainerStarted","Data":"2bacca19b791f0a269a468a76030ec62a436c1268018c714cb3d7b37c0dada77"} Dec 03 16:59:23 crc kubenswrapper[4768]: I1203 16:59:23.355488 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" podStartSLOduration=1.9474768519999999 podStartE2EDuration="2.355472689s" podCreationTimestamp="2025-12-03 16:59:21 +0000 UTC" firstStartedPulling="2025-12-03 16:59:22.386206425 +0000 UTC m=+2459.305542888" lastFinishedPulling="2025-12-03 16:59:22.794202302 +0000 UTC m=+2459.713538725" observedRunningTime="2025-12-03 16:59:23.353642421 +0000 UTC m=+2460.272978864" watchObservedRunningTime="2025-12-03 16:59:23.355472689 +0000 UTC m=+2460.274809112" Dec 03 16:59:25 crc kubenswrapper[4768]: I1203 16:59:25.532108 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:59:25 crc kubenswrapper[4768]: E1203 16:59:25.532933 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 16:59:38 crc kubenswrapper[4768]: I1203 16:59:38.532545 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 16:59:38 crc kubenswrapper[4768]: E1203 16:59:38.534354 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 03 16:59:49 crc kubenswrapper[4768]: I1203 16:59:49.532662 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4"
Dec 03 16:59:49 crc kubenswrapper[4768]: E1203 16:59:49.534633 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.171952 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"]
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.175462 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.178660 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.180168 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.187120 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"]
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.279686 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75tbz\" (UniqueName: \"kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.280164 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.280204 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"
Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.383586 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75tbz\" (UniqueName: \"kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"
\"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.383717 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.383783 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.385554 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.394584 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.415588 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75tbz\" (UniqueName: \"kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz\") pod \"collect-profiles-29413020-xk6hc\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:00 crc kubenswrapper[4768]: I1203 17:00:00.515570 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:01 crc kubenswrapper[4768]: I1203 17:00:01.065810 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc"] Dec 03 17:00:01 crc kubenswrapper[4768]: I1203 17:00:01.739622 4768 generic.go:334] "Generic (PLEG): container finished" podID="3d9dbab1-4992-4718-acf4-91519c1b6c98" containerID="2c5c08c1450a18cedc50ed09806187f516adbebcbd39c431d5eda824a97517b1" exitCode=0 Dec 03 17:00:01 crc kubenswrapper[4768]: I1203 17:00:01.740003 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" event={"ID":"3d9dbab1-4992-4718-acf4-91519c1b6c98","Type":"ContainerDied","Data":"2c5c08c1450a18cedc50ed09806187f516adbebcbd39c431d5eda824a97517b1"} Dec 03 17:00:01 crc kubenswrapper[4768]: I1203 17:00:01.740034 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" event={"ID":"3d9dbab1-4992-4718-acf4-91519c1b6c98","Type":"ContainerStarted","Data":"d429f204c952116d89fba64645dd8ecbc808d382a4fb482ea27c14053aafebdc"} Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.162877 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.252038 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75tbz\" (UniqueName: \"kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz\") pod \"3d9dbab1-4992-4718-acf4-91519c1b6c98\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.252209 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume\") pod \"3d9dbab1-4992-4718-acf4-91519c1b6c98\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.252236 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume\") pod \"3d9dbab1-4992-4718-acf4-91519c1b6c98\" (UID: \"3d9dbab1-4992-4718-acf4-91519c1b6c98\") " Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.253454 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume" (OuterVolumeSpecName: "config-volume") pod "3d9dbab1-4992-4718-acf4-91519c1b6c98" (UID: "3d9dbab1-4992-4718-acf4-91519c1b6c98"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.260371 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3d9dbab1-4992-4718-acf4-91519c1b6c98" (UID: "3d9dbab1-4992-4718-acf4-91519c1b6c98"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.260643 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz" (OuterVolumeSpecName: "kube-api-access-75tbz") pod "3d9dbab1-4992-4718-acf4-91519c1b6c98" (UID: "3d9dbab1-4992-4718-acf4-91519c1b6c98"). InnerVolumeSpecName "kube-api-access-75tbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.354494 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75tbz\" (UniqueName: \"kubernetes.io/projected/3d9dbab1-4992-4718-acf4-91519c1b6c98-kube-api-access-75tbz\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.354543 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3d9dbab1-4992-4718-acf4-91519c1b6c98-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.354554 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3d9dbab1-4992-4718-acf4-91519c1b6c98-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.764760 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" event={"ID":"3d9dbab1-4992-4718-acf4-91519c1b6c98","Type":"ContainerDied","Data":"d429f204c952116d89fba64645dd8ecbc808d382a4fb482ea27c14053aafebdc"} Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.765014 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d429f204c952116d89fba64645dd8ecbc808d382a4fb482ea27c14053aafebdc" Dec 03 17:00:03 crc kubenswrapper[4768]: I1203 17:00:03.764915 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413020-xk6hc" Dec 03 17:00:04 crc kubenswrapper[4768]: I1203 17:00:04.250723 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"] Dec 03 17:00:04 crc kubenswrapper[4768]: I1203 17:00:04.259855 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412975-rb4qq"] Dec 03 17:00:04 crc kubenswrapper[4768]: I1203 17:00:04.531934 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:00:04 crc kubenswrapper[4768]: E1203 17:00:04.532633 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:00:05 crc kubenswrapper[4768]: I1203 17:00:05.553543 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae0eb1d1-505d-4031-885d-652dea1526ea" path="/var/lib/kubelet/pods/ae0eb1d1-505d-4031-885d-652dea1526ea/volumes" Dec 03 17:00:06 crc kubenswrapper[4768]: I1203 17:00:06.809386 4768 generic.go:334] "Generic (PLEG): container finished" podID="f4bca08e-ad57-49ce-8fd2-29262a619a67" containerID="cba92bc08c197bf6eaecda596c32af5fa9709a358d21e4b8e345365379175b37" exitCode=0 Dec 03 17:00:06 crc kubenswrapper[4768]: I1203 17:00:06.809451 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" event={"ID":"f4bca08e-ad57-49ce-8fd2-29262a619a67","Type":"ContainerDied","Data":"cba92bc08c197bf6eaecda596c32af5fa9709a358d21e4b8e345365379175b37"} Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.425781 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571020 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571199 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571244 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d266d\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571289 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571400 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571439 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571503 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571553 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571633 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") " Dec 03 17:00:08 crc 
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571744 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") "
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571890 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") "
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.571985 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") "
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.572054 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") "
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.572110 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle\") pod \"f4bca08e-ad57-49ce-8fd2-29262a619a67\" (UID: \"f4bca08e-ad57-49ce-8fd2-29262a619a67\") "
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.577585 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.578698 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.579050 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.579542 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.580207 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.580736 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d" (OuterVolumeSpecName: "kube-api-access-d266d") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "kube-api-access-d266d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.581535 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.581800 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.582285 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.587506 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.589860 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.590873 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.607906 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory" (OuterVolumeSpecName: "inventory") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.627036 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f4bca08e-ad57-49ce-8fd2-29262a619a67" (UID: "f4bca08e-ad57-49ce-8fd2-29262a619a67"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687575 4768 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687628 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d266d\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-kube-api-access-d266d\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687644 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687659 4768 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687673 4768 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687687 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687700 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687712 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687726 4768 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687740 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/f4bca08e-ad57-49ce-8fd2-29262a619a67-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687753 4768 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687768 4768 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687781 4768 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.687793 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f4bca08e-ad57-49ce-8fd2-29262a619a67-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.833836 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" event={"ID":"f4bca08e-ad57-49ce-8fd2-29262a619a67","Type":"ContainerDied","Data":"2bacca19b791f0a269a468a76030ec62a436c1268018c714cb3d7b37c0dada77"} Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.834235 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bacca19b791f0a269a468a76030ec62a436c1268018c714cb3d7b37c0dada77" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.834208 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-lmm92" Dec 03 17:00:08 crc kubenswrapper[4768]: I1203 17:00:08.999444 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5"] Dec 03 17:00:09 crc kubenswrapper[4768]: E1203 17:00:09.002548 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d9dbab1-4992-4718-acf4-91519c1b6c98" containerName="collect-profiles" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.002642 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d9dbab1-4992-4718-acf4-91519c1b6c98" containerName="collect-profiles" Dec 03 17:00:09 crc kubenswrapper[4768]: E1203 17:00:09.002704 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4bca08e-ad57-49ce-8fd2-29262a619a67" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.002752 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4bca08e-ad57-49ce-8fd2-29262a619a67" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.003211 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d9dbab1-4992-4718-acf4-91519c1b6c98" containerName="collect-profiles" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.003250 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4bca08e-ad57-49ce-8fd2-29262a619a67" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.005941 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.008866 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.009321 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.009517 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.010095 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.012158 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.015588 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5"] Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.096407 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.096519 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8d6r\" (UniqueName: \"kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.096647 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.096685 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.096755 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.199114 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.199191 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.199285 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.199484 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.199547 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8d6r\" (UniqueName: \"kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.201013 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.205183 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.206288 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.213058 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.217363 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8d6r\" (UniqueName: \"kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-t8vn5\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.331675 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:00:09 crc kubenswrapper[4768]: I1203 17:00:09.981228 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5"] Dec 03 17:00:10 crc kubenswrapper[4768]: I1203 17:00:10.854305 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" event={"ID":"0a655975-f7c9-49f9-9f76-05d58ae66f9b","Type":"ContainerStarted","Data":"07a66f80b3b09f3e9874cb82f95f66cf42f89305f2de3ea153b84d9f1d1a4813"} Dec 03 17:00:10 crc kubenswrapper[4768]: I1203 17:00:10.854938 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" event={"ID":"0a655975-f7c9-49f9-9f76-05d58ae66f9b","Type":"ContainerStarted","Data":"d11b951a5765b35d70219bc75fef878e0948084765085f7cae26af4e81d93d33"} Dec 03 17:00:10 crc kubenswrapper[4768]: I1203 17:00:10.875581 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" podStartSLOduration=2.400893668 podStartE2EDuration="2.875564487s" podCreationTimestamp="2025-12-03 17:00:08 +0000 UTC" firstStartedPulling="2025-12-03 17:00:09.990433466 +0000 UTC m=+2506.909769929" lastFinishedPulling="2025-12-03 17:00:10.465104325 +0000 UTC m=+2507.384440748" observedRunningTime="2025-12-03 17:00:10.867882015 +0000 UTC m=+2507.787218428" watchObservedRunningTime="2025-12-03 17:00:10.875564487 +0000 UTC m=+2507.794900910" Dec 03 17:00:19 crc kubenswrapper[4768]: I1203 17:00:19.087833 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="hostpath-provisioner/csi-hostpathplugin-ngrx8" podUID="3220a3d3-0321-4863-b645-5b28949d7163" containerName="hostpath-provisioner" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 03 17:00:19 crc kubenswrapper[4768]: I1203 17:00:19.532775 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:00:19 crc kubenswrapper[4768]: E1203 17:00:19.533076 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:00:31 crc kubenswrapper[4768]: I1203 17:00:31.532682 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:00:31 crc kubenswrapper[4768]: E1203 17:00:31.533936 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:00:45 crc kubenswrapper[4768]: I1203 17:00:45.532084 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:00:45 crc kubenswrapper[4768]: E1203 17:00:45.532979 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:00:47 crc kubenswrapper[4768]: I1203 17:00:47.947716 4768 scope.go:117] "RemoveContainer" containerID="194c8e29560223d8a34c284ca2439ab3ca804b9a6ebfb9921fd7f1ad2a619b65" Dec 03 17:00:58 crc kubenswrapper[4768]: I1203 17:00:58.532083 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:00:59 crc kubenswrapper[4768]: I1203 17:00:59.781081 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58"} Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.181285 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29413021-2dppd"] Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.183343 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.195774 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29413021-2dppd"] Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.318778 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.318911 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.318947 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.319315 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt8ms\" (UniqueName: \"kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.422348 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.422441 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.422741 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt8ms\" (UniqueName: \"kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.423055 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.430399 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.432292 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.436218 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.442153 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt8ms\" (UniqueName: \"kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms\") pod \"keystone-cron-29413021-2dppd\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:00 crc kubenswrapper[4768]: I1203 17:01:00.515555 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:01 crc kubenswrapper[4768]: I1203 17:01:01.045091 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29413021-2dppd"] Dec 03 17:01:01 crc kubenswrapper[4768]: I1203 17:01:01.805973 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413021-2dppd" event={"ID":"39c831b1-4d74-43e9-a798-a0ce0f8c9c15","Type":"ContainerStarted","Data":"23f4d67aa9bacc54bc25e11d1f3f8ba7705144f8d0f447ced8e56f4d90fddb08"} Dec 03 17:01:01 crc kubenswrapper[4768]: I1203 17:01:01.806555 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413021-2dppd" event={"ID":"39c831b1-4d74-43e9-a798-a0ce0f8c9c15","Type":"ContainerStarted","Data":"0b9fb46472367c6bbf4f2bc9201d8d1e4e163da36950c18150cf890fbb16a9f4"} Dec 03 17:01:01 crc kubenswrapper[4768]: I1203 17:01:01.829940 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29413021-2dppd" podStartSLOduration=1.82991699 podStartE2EDuration="1.82991699s" podCreationTimestamp="2025-12-03 17:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:01:01.821033376 +0000 UTC m=+2558.740369819" watchObservedRunningTime="2025-12-03 17:01:01.82991699 +0000 UTC m=+2558.749253513" Dec 03 17:01:03 crc kubenswrapper[4768]: I1203 17:01:03.828758 4768 generic.go:334] "Generic (PLEG): container finished" podID="39c831b1-4d74-43e9-a798-a0ce0f8c9c15" containerID="23f4d67aa9bacc54bc25e11d1f3f8ba7705144f8d0f447ced8e56f4d90fddb08" exitCode=0 Dec 03 17:01:03 crc kubenswrapper[4768]: I1203 17:01:03.828859 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413021-2dppd" event={"ID":"39c831b1-4d74-43e9-a798-a0ce0f8c9c15","Type":"ContainerDied","Data":"23f4d67aa9bacc54bc25e11d1f3f8ba7705144f8d0f447ced8e56f4d90fddb08"} Dec 03 17:01:05 crc kubenswrapper[4768]: 
I1203 17:01:05.298449 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.362621 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle\") pod \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.362747 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys\") pod \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.362830 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data\") pod \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.362906 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt8ms\" (UniqueName: \"kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms\") pod \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\" (UID: \"39c831b1-4d74-43e9-a798-a0ce0f8c9c15\") " Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.370444 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "39c831b1-4d74-43e9-a798-a0ce0f8c9c15" (UID: "39c831b1-4d74-43e9-a798-a0ce0f8c9c15"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.370658 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms" (OuterVolumeSpecName: "kube-api-access-bt8ms") pod "39c831b1-4d74-43e9-a798-a0ce0f8c9c15" (UID: "39c831b1-4d74-43e9-a798-a0ce0f8c9c15"). InnerVolumeSpecName "kube-api-access-bt8ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.424380 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data" (OuterVolumeSpecName: "config-data") pod "39c831b1-4d74-43e9-a798-a0ce0f8c9c15" (UID: "39c831b1-4d74-43e9-a798-a0ce0f8c9c15"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.432583 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39c831b1-4d74-43e9-a798-a0ce0f8c9c15" (UID: "39c831b1-4d74-43e9-a798-a0ce0f8c9c15"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.466083 4768 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.466129 4768 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.466139 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-config-data\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.466149 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt8ms\" (UniqueName: \"kubernetes.io/projected/39c831b1-4d74-43e9-a798-a0ce0f8c9c15-kube-api-access-bt8ms\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.848205 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29413021-2dppd" event={"ID":"39c831b1-4d74-43e9-a798-a0ce0f8c9c15","Type":"ContainerDied","Data":"0b9fb46472367c6bbf4f2bc9201d8d1e4e163da36950c18150cf890fbb16a9f4"} Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.848663 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b9fb46472367c6bbf4f2bc9201d8d1e4e163da36950c18150cf890fbb16a9f4" Dec 03 17:01:05 crc kubenswrapper[4768]: I1203 17:01:05.848303 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29413021-2dppd" Dec 03 17:01:21 crc kubenswrapper[4768]: I1203 17:01:21.034338 4768 generic.go:334] "Generic (PLEG): container finished" podID="0a655975-f7c9-49f9-9f76-05d58ae66f9b" containerID="07a66f80b3b09f3e9874cb82f95f66cf42f89305f2de3ea153b84d9f1d1a4813" exitCode=0 Dec 03 17:01:21 crc kubenswrapper[4768]: I1203 17:01:21.034467 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" event={"ID":"0a655975-f7c9-49f9-9f76-05d58ae66f9b","Type":"ContainerDied","Data":"07a66f80b3b09f3e9874cb82f95f66cf42f89305f2de3ea153b84d9f1d1a4813"} Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.601988 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.759350 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key\") pod \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.759501 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0\") pod \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.759649 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory\") pod \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.759689 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8d6r\" (UniqueName: \"kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r\") pod \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.759870 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle\") pod \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\" (UID: \"0a655975-f7c9-49f9-9f76-05d58ae66f9b\") " Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.766009 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r" (OuterVolumeSpecName: "kube-api-access-b8d6r") pod "0a655975-f7c9-49f9-9f76-05d58ae66f9b" (UID: "0a655975-f7c9-49f9-9f76-05d58ae66f9b"). InnerVolumeSpecName "kube-api-access-b8d6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.768433 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "0a655975-f7c9-49f9-9f76-05d58ae66f9b" (UID: "0a655975-f7c9-49f9-9f76-05d58ae66f9b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.796475 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0a655975-f7c9-49f9-9f76-05d58ae66f9b" (UID: "0a655975-f7c9-49f9-9f76-05d58ae66f9b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.797989 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory" (OuterVolumeSpecName: "inventory") pod "0a655975-f7c9-49f9-9f76-05d58ae66f9b" (UID: "0a655975-f7c9-49f9-9f76-05d58ae66f9b"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.800554 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "0a655975-f7c9-49f9-9f76-05d58ae66f9b" (UID: "0a655975-f7c9-49f9-9f76-05d58ae66f9b"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.867130 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.867190 4768 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.867214 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.867234 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8d6r\" (UniqueName: \"kubernetes.io/projected/0a655975-f7c9-49f9-9f76-05d58ae66f9b-kube-api-access-b8d6r\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:22 crc kubenswrapper[4768]: I1203 17:01:22.867255 4768 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a655975-f7c9-49f9-9f76-05d58ae66f9b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.057647 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" event={"ID":"0a655975-f7c9-49f9-9f76-05d58ae66f9b","Type":"ContainerDied","Data":"d11b951a5765b35d70219bc75fef878e0948084765085f7cae26af4e81d93d33"} Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.057684 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d11b951a5765b35d70219bc75fef878e0948084765085f7cae26af4e81d93d33" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.057773 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-t8vn5" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.213042 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn"] Dec 03 17:01:23 crc kubenswrapper[4768]: E1203 17:01:23.213430 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39c831b1-4d74-43e9-a798-a0ce0f8c9c15" containerName="keystone-cron" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.213446 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="39c831b1-4d74-43e9-a798-a0ce0f8c9c15" containerName="keystone-cron" Dec 03 17:01:23 crc kubenswrapper[4768]: E1203 17:01:23.213456 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a655975-f7c9-49f9-9f76-05d58ae66f9b" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.213462 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a655975-f7c9-49f9-9f76-05d58ae66f9b" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.213675 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="39c831b1-4d74-43e9-a798-a0ce0f8c9c15" containerName="keystone-cron" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.213694 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a655975-f7c9-49f9-9f76-05d58ae66f9b" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.214396 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.219733 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.219778 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.219953 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.219986 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.220130 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.220870 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.229179 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn"] Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.381865 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" 
Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.382055 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.382324 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.382459 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvfw7\" (UniqueName: \"kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.382631 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.382749 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484054 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484166 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484208 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvfw7\" (UniqueName: 
\"kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484255 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484299 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.484362 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.488251 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.489060 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.489584 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.490121 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.492298 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.513395 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvfw7\" (UniqueName: \"kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.550296 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:01:23 crc kubenswrapper[4768]: I1203 17:01:23.557679 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:01:24 crc kubenswrapper[4768]: I1203 17:01:24.170179 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn"] Dec 03 17:01:24 crc kubenswrapper[4768]: I1203 17:01:24.692914 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:01:25 crc kubenswrapper[4768]: I1203 17:01:25.096107 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" event={"ID":"b4046334-7016-451c-b6d8-ad389cca206a","Type":"ContainerStarted","Data":"19e2da4cbe20d5c722836b7e9e968414dc3f57fa318a4b3b82b21f46fe981c92"} Dec 03 17:01:25 crc kubenswrapper[4768]: I1203 17:01:25.096430 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" event={"ID":"b4046334-7016-451c-b6d8-ad389cca206a","Type":"ContainerStarted","Data":"21877c926c2d543ed54e2c47a57ec146799415809e8f43d257b77322c73e939d"} Dec 03 17:01:25 crc kubenswrapper[4768]: I1203 17:01:25.120069 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" podStartSLOduration=1.632815458 podStartE2EDuration="2.120040607s" podCreationTimestamp="2025-12-03 17:01:23 +0000 UTC" firstStartedPulling="2025-12-03 17:01:24.202071433 +0000 UTC m=+2581.121407856" lastFinishedPulling="2025-12-03 17:01:24.689296582 +0000 UTC m=+2581.608633005" observedRunningTime="2025-12-03 17:01:25.113190881 +0000 UTC m=+2582.032527314" watchObservedRunningTime="2025-12-03 17:01:25.120040607 +0000 UTC m=+2582.039377030" Dec 03 17:02:15 crc kubenswrapper[4768]: I1203 17:02:15.717310 4768 generic.go:334] "Generic (PLEG): container finished" podID="b4046334-7016-451c-b6d8-ad389cca206a" containerID="19e2da4cbe20d5c722836b7e9e968414dc3f57fa318a4b3b82b21f46fe981c92" exitCode=0 Dec 03 17:02:15 crc kubenswrapper[4768]: I1203 17:02:15.717388 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" 
event={"ID":"b4046334-7016-451c-b6d8-ad389cca206a","Type":"ContainerDied","Data":"19e2da4cbe20d5c722836b7e9e968414dc3f57fa318a4b3b82b21f46fe981c92"} Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.165038 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.329480 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.329690 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.330627 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvfw7\" (UniqueName: \"kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.330799 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.330874 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.330975 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory\") pod \"b4046334-7016-451c-b6d8-ad389cca206a\" (UID: \"b4046334-7016-451c-b6d8-ad389cca206a\") " Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.335808 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.336031 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7" (OuterVolumeSpecName: "kube-api-access-qvfw7") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "kube-api-access-qvfw7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.367825 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.369088 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory" (OuterVolumeSpecName: "inventory") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.371839 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.374230 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b4046334-7016-451c-b6d8-ad389cca206a" (UID: "b4046334-7016-451c-b6d8-ad389cca206a"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433340 4768 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433372 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433384 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433394 4768 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433405 4768 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/b4046334-7016-451c-b6d8-ad389cca206a-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.433417 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvfw7\" (UniqueName: \"kubernetes.io/projected/b4046334-7016-451c-b6d8-ad389cca206a-kube-api-access-qvfw7\") on node \"crc\" DevicePath \"\"" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.736756 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" event={"ID":"b4046334-7016-451c-b6d8-ad389cca206a","Type":"ContainerDied","Data":"21877c926c2d543ed54e2c47a57ec146799415809e8f43d257b77322c73e939d"} Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.736802 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21877c926c2d543ed54e2c47a57ec146799415809e8f43d257b77322c73e939d" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.736818 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.900919 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp"] Dec 03 17:02:17 crc kubenswrapper[4768]: E1203 17:02:17.901678 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4046334-7016-451c-b6d8-ad389cca206a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.901709 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4046334-7016-451c-b6d8-ad389cca206a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.902045 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4046334-7016-451c-b6d8-ad389cca206a" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.903345 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.905773 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.905937 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.906262 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.906391 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.906554 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.951086 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp"] Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.951330 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.952910 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.953922 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dmls\" (UniqueName: \"kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.954188 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:17 crc kubenswrapper[4768]: I1203 17:02:17.954233 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.057208 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.057419 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.057453 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dmls\" (UniqueName: \"kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.057551 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.057608 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.062019 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.062883 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.063777 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.065733 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.077717 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dmls\" (UniqueName: \"kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.249779 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:02:18 crc kubenswrapper[4768]: I1203 17:02:18.833491 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp"] Dec 03 17:02:18 crc kubenswrapper[4768]: W1203 17:02:18.835358 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2025631a_ad01_494e_a78d_095aaedfa302.slice/crio-fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8 WatchSource:0}: Error finding container fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8: Status 404 returned error can't find the container with id fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8 Dec 03 17:02:19 crc kubenswrapper[4768]: I1203 17:02:19.758828 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" event={"ID":"2025631a-ad01-494e-a78d-095aaedfa302","Type":"ContainerStarted","Data":"cc7490c33551c7e7168660f0f0178987719e28d41ba24ba0431aabdf88c9df51"} Dec 03 17:02:19 crc kubenswrapper[4768]: I1203 17:02:19.759198 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" event={"ID":"2025631a-ad01-494e-a78d-095aaedfa302","Type":"ContainerStarted","Data":"fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8"} Dec 03 17:02:19 crc kubenswrapper[4768]: I1203 17:02:19.772713 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" podStartSLOduration=2.341533051 podStartE2EDuration="2.772694947s" podCreationTimestamp="2025-12-03 17:02:17 +0000 UTC" firstStartedPulling="2025-12-03 17:02:18.841040643 +0000 UTC m=+2635.760377076" lastFinishedPulling="2025-12-03 17:02:19.272202549 +0000 UTC m=+2636.191538972" observedRunningTime="2025-12-03 17:02:19.771479103 +0000 UTC m=+2636.690815556" watchObservedRunningTime="2025-12-03 17:02:19.772694947 +0000 UTC m=+2636.692031370" Dec 03 17:03:26 crc kubenswrapper[4768]: I1203 17:03:26.027999 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:03:26 crc kubenswrapper[4768]: I1203 17:03:26.028684 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial 
tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:03:56 crc kubenswrapper[4768]: I1203 17:03:56.028161 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:03:56 crc kubenswrapper[4768]: I1203 17:03:56.028743 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.497547 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.503386 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.520328 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.653347 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.653440 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wm9w\" (UniqueName: \"kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.653567 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.755432 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.755564 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.755674 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wm9w\" (UniqueName: 
\"kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.756492 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.756785 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.777973 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wm9w\" (UniqueName: \"kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w\") pod \"redhat-operators-m56xg\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:04 crc kubenswrapper[4768]: I1203 17:04:04.843120 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:05 crc kubenswrapper[4768]: I1203 17:04:05.327736 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:05 crc kubenswrapper[4768]: I1203 17:04:05.873216 4768 generic.go:334] "Generic (PLEG): container finished" podID="89a9c706-6a04-43d2-8672-601f29f07e04" containerID="7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61" exitCode=0 Dec 03 17:04:05 crc kubenswrapper[4768]: I1203 17:04:05.873311 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerDied","Data":"7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61"} Dec 03 17:04:05 crc kubenswrapper[4768]: I1203 17:04:05.873492 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerStarted","Data":"c7fd83257345a358ce2ddabac2c0fb9b83887c2342e5483ad635d8da323dcb86"} Dec 03 17:04:05 crc kubenswrapper[4768]: I1203 17:04:05.875685 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:04:06 crc kubenswrapper[4768]: I1203 17:04:06.886021 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerStarted","Data":"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8"} Dec 03 17:04:10 crc kubenswrapper[4768]: I1203 17:04:10.934077 4768 generic.go:334] "Generic (PLEG): container finished" podID="89a9c706-6a04-43d2-8672-601f29f07e04" containerID="39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8" exitCode=0 Dec 03 17:04:10 crc kubenswrapper[4768]: I1203 17:04:10.934186 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" 
event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerDied","Data":"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8"} Dec 03 17:04:11 crc kubenswrapper[4768]: I1203 17:04:11.946681 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerStarted","Data":"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc"} Dec 03 17:04:11 crc kubenswrapper[4768]: I1203 17:04:11.973780 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m56xg" podStartSLOduration=2.531924153 podStartE2EDuration="7.973765396s" podCreationTimestamp="2025-12-03 17:04:04 +0000 UTC" firstStartedPulling="2025-12-03 17:04:05.875383343 +0000 UTC m=+2742.794719766" lastFinishedPulling="2025-12-03 17:04:11.317224586 +0000 UTC m=+2748.236561009" observedRunningTime="2025-12-03 17:04:11.972280077 +0000 UTC m=+2748.891616500" watchObservedRunningTime="2025-12-03 17:04:11.973765396 +0000 UTC m=+2748.893101819" Dec 03 17:04:14 crc kubenswrapper[4768]: I1203 17:04:14.843397 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:14 crc kubenswrapper[4768]: I1203 17:04:14.843736 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:15 crc kubenswrapper[4768]: I1203 17:04:15.903938 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m56xg" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="registry-server" probeResult="failure" output=< Dec 03 17:04:15 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 17:04:15 crc kubenswrapper[4768]: > Dec 03 17:04:24 crc kubenswrapper[4768]: I1203 17:04:24.896497 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:24 crc kubenswrapper[4768]: I1203 17:04:24.965128 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:25 crc kubenswrapper[4768]: I1203 17:04:25.147918 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.028582 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.028707 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.028799 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.029867 4768 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.029966 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58" gracePeriod=600 Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.118135 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m56xg" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="registry-server" containerID="cri-o://213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc" gracePeriod=2 Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.755951 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.893675 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities\") pod \"89a9c706-6a04-43d2-8672-601f29f07e04\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.893971 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wm9w\" (UniqueName: \"kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w\") pod \"89a9c706-6a04-43d2-8672-601f29f07e04\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.894068 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content\") pod \"89a9c706-6a04-43d2-8672-601f29f07e04\" (UID: \"89a9c706-6a04-43d2-8672-601f29f07e04\") " Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.894531 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities" (OuterVolumeSpecName: "utilities") pod "89a9c706-6a04-43d2-8672-601f29f07e04" (UID: "89a9c706-6a04-43d2-8672-601f29f07e04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.894858 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.900443 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w" (OuterVolumeSpecName: "kube-api-access-4wm9w") pod "89a9c706-6a04-43d2-8672-601f29f07e04" (UID: "89a9c706-6a04-43d2-8672-601f29f07e04"). InnerVolumeSpecName "kube-api-access-4wm9w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:04:26 crc kubenswrapper[4768]: I1203 17:04:26.996887 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wm9w\" (UniqueName: \"kubernetes.io/projected/89a9c706-6a04-43d2-8672-601f29f07e04-kube-api-access-4wm9w\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.023737 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89a9c706-6a04-43d2-8672-601f29f07e04" (UID: "89a9c706-6a04-43d2-8672-601f29f07e04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.099120 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89a9c706-6a04-43d2-8672-601f29f07e04-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.129541 4768 generic.go:334] "Generic (PLEG): container finished" podID="89a9c706-6a04-43d2-8672-601f29f07e04" containerID="213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc" exitCode=0 Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.129783 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerDied","Data":"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc"} Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.129869 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m56xg" event={"ID":"89a9c706-6a04-43d2-8672-601f29f07e04","Type":"ContainerDied","Data":"c7fd83257345a358ce2ddabac2c0fb9b83887c2342e5483ad635d8da323dcb86"} Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.129877 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m56xg" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.129890 4768 scope.go:117] "RemoveContainer" containerID="213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.133288 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58" exitCode=0 Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.133318 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58"} Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.133347 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb"} Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.159229 4768 scope.go:117] "RemoveContainer" containerID="39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.193374 4768 scope.go:117] "RemoveContainer" containerID="7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.238805 4768 scope.go:117] "RemoveContainer" containerID="213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc" Dec 03 17:04:27 crc kubenswrapper[4768]: E1203 17:04:27.239325 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc\": container with ID starting with 213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc not found: ID does not exist" containerID="213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.239382 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc"} err="failed to get container status \"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc\": rpc error: code = NotFound desc = could not find container \"213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc\": container with ID starting with 213896b3f61e683a6da6c12abb58592b4f6bea100023fa98d6a250face9405bc not found: ID does not exist" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.239410 4768 scope.go:117] "RemoveContainer" containerID="39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8" Dec 03 17:04:27 crc kubenswrapper[4768]: E1203 17:04:27.239758 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8\": container with ID starting with 39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8 not found: ID does not exist" containerID="39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.239810 4768 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8"} err="failed to get container status \"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8\": rpc error: code = NotFound desc = could not find container \"39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8\": container with ID starting with 39df1559d4ec5446ba7f9963cfe2919001c8539bef28fef224adfbd356fc0ed8 not found: ID does not exist" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.239842 4768 scope.go:117] "RemoveContainer" containerID="7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61" Dec 03 17:04:27 crc kubenswrapper[4768]: E1203 17:04:27.240198 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61\": container with ID starting with 7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61 not found: ID does not exist" containerID="7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.240223 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61"} err="failed to get container status \"7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61\": rpc error: code = NotFound desc = could not find container \"7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61\": container with ID starting with 7081cfca094f68461b30c9a056d0f102e67b1da233a10d36e2ad1571c6ce9b61 not found: ID does not exist" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.240243 4768 scope.go:117] "RemoveContainer" containerID="70d1067c664a1dc8670662da58823c324e23eda0d878611d72715b0584c2cca4" Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.260669 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.262062 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m56xg"] Dec 03 17:04:27 crc kubenswrapper[4768]: I1203 17:04:27.545870 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" path="/var/lib/kubelet/pods/89a9c706-6a04-43d2-8672-601f29f07e04/volumes" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.846925 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vl5v6"] Dec 03 17:05:01 crc kubenswrapper[4768]: E1203 17:05:01.848226 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="extract-content" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.848269 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="extract-content" Dec 03 17:05:01 crc kubenswrapper[4768]: E1203 17:05:01.848337 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="extract-utilities" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.848351 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="extract-utilities" Dec 03 17:05:01 crc kubenswrapper[4768]: E1203 17:05:01.848376 4768 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="registry-server" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.848387 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="registry-server" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.848774 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="89a9c706-6a04-43d2-8672-601f29f07e04" containerName="registry-server" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.851483 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.861767 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vl5v6"] Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.905940 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-utilities\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.906225 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4dsp\" (UniqueName: \"kubernetes.io/projected/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-kube-api-access-t4dsp\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:01 crc kubenswrapper[4768]: I1203 17:05:01.906831 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-catalog-content\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.009212 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-utilities\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.009789 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4dsp\" (UniqueName: \"kubernetes.io/projected/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-kube-api-access-t4dsp\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.010101 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-catalog-content\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.010415 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-utilities\") pod 
\"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.010451 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-catalog-content\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.029754 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4dsp\" (UniqueName: \"kubernetes.io/projected/5897d8d4-04ee-4f56-81c1-bdcd96028ec4-kube-api-access-t4dsp\") pod \"community-operators-vl5v6\" (UID: \"5897d8d4-04ee-4f56-81c1-bdcd96028ec4\") " pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.175465 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:02 crc kubenswrapper[4768]: I1203 17:05:02.804990 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vl5v6"] Dec 03 17:05:03 crc kubenswrapper[4768]: I1203 17:05:03.544921 4768 generic.go:334] "Generic (PLEG): container finished" podID="5897d8d4-04ee-4f56-81c1-bdcd96028ec4" containerID="2590c8184371f833862c514a85c29b78e835f15e3eced8c0b7d4fe2ca9494e39" exitCode=0 Dec 03 17:05:03 crc kubenswrapper[4768]: I1203 17:05:03.548759 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vl5v6" event={"ID":"5897d8d4-04ee-4f56-81c1-bdcd96028ec4","Type":"ContainerDied","Data":"2590c8184371f833862c514a85c29b78e835f15e3eced8c0b7d4fe2ca9494e39"} Dec 03 17:05:03 crc kubenswrapper[4768]: I1203 17:05:03.548844 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vl5v6" event={"ID":"5897d8d4-04ee-4f56-81c1-bdcd96028ec4","Type":"ContainerStarted","Data":"763786126f20fdb12b6482626cc5a3876bb59891f9f02abc781643d22a3b3fa3"} Dec 03 17:05:08 crc kubenswrapper[4768]: I1203 17:05:08.635304 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vl5v6" event={"ID":"5897d8d4-04ee-4f56-81c1-bdcd96028ec4","Type":"ContainerStarted","Data":"00e7ff3fad6b64353c984dd12614db4452f4c4d65f1ca11b0002ab5e055d6518"} Dec 03 17:05:09 crc kubenswrapper[4768]: I1203 17:05:09.646313 4768 generic.go:334] "Generic (PLEG): container finished" podID="5897d8d4-04ee-4f56-81c1-bdcd96028ec4" containerID="00e7ff3fad6b64353c984dd12614db4452f4c4d65f1ca11b0002ab5e055d6518" exitCode=0 Dec 03 17:05:09 crc kubenswrapper[4768]: I1203 17:05:09.646361 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vl5v6" event={"ID":"5897d8d4-04ee-4f56-81c1-bdcd96028ec4","Type":"ContainerDied","Data":"00e7ff3fad6b64353c984dd12614db4452f4c4d65f1ca11b0002ab5e055d6518"} Dec 03 17:05:10 crc kubenswrapper[4768]: I1203 17:05:10.658314 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vl5v6" event={"ID":"5897d8d4-04ee-4f56-81c1-bdcd96028ec4","Type":"ContainerStarted","Data":"4d0481df3d41eab7c756ff1b035e01ea18713563b4924b324f9eef10e2ef13ed"} Dec 03 17:05:10 crc kubenswrapper[4768]: I1203 17:05:10.683053 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-vl5v6" podStartSLOduration=2.922837704 podStartE2EDuration="9.683029247s" podCreationTimestamp="2025-12-03 17:05:01 +0000 UTC" firstStartedPulling="2025-12-03 17:05:03.547568007 +0000 UTC m=+2800.466904440" lastFinishedPulling="2025-12-03 17:05:10.30775956 +0000 UTC m=+2807.227095983" observedRunningTime="2025-12-03 17:05:10.674768306 +0000 UTC m=+2807.594104729" watchObservedRunningTime="2025-12-03 17:05:10.683029247 +0000 UTC m=+2807.602365670" Dec 03 17:05:12 crc kubenswrapper[4768]: I1203 17:05:12.176965 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:12 crc kubenswrapper[4768]: I1203 17:05:12.177230 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:13 crc kubenswrapper[4768]: I1203 17:05:13.237159 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-vl5v6" podUID="5897d8d4-04ee-4f56-81c1-bdcd96028ec4" containerName="registry-server" probeResult="failure" output=< Dec 03 17:05:13 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 17:05:13 crc kubenswrapper[4768]: > Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.229065 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.278458 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vl5v6" Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.349756 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vl5v6"] Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.489931 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.490254 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-45zmq" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="registry-server" containerID="cri-o://49c0099eecd6eb30db379a06867a122cb67adffc90da4702f4fe6b6a4216e177" gracePeriod=2 Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.804920 4768 generic.go:334] "Generic (PLEG): container finished" podID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerID="49c0099eecd6eb30db379a06867a122cb67adffc90da4702f4fe6b6a4216e177" exitCode=0 Dec 03 17:05:22 crc kubenswrapper[4768]: I1203 17:05:22.806092 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerDied","Data":"49c0099eecd6eb30db379a06867a122cb67adffc90da4702f4fe6b6a4216e177"} Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.038625 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45zmq" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.173660 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities\") pod \"04d4fd48-11a1-422c-bde1-221ca8b0a748\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.173799 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content\") pod \"04d4fd48-11a1-422c-bde1-221ca8b0a748\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.173866 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kn5xw\" (UniqueName: \"kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw\") pod \"04d4fd48-11a1-422c-bde1-221ca8b0a748\" (UID: \"04d4fd48-11a1-422c-bde1-221ca8b0a748\") " Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.174649 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities" (OuterVolumeSpecName: "utilities") pod "04d4fd48-11a1-422c-bde1-221ca8b0a748" (UID: "04d4fd48-11a1-422c-bde1-221ca8b0a748"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.180470 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw" (OuterVolumeSpecName: "kube-api-access-kn5xw") pod "04d4fd48-11a1-422c-bde1-221ca8b0a748" (UID: "04d4fd48-11a1-422c-bde1-221ca8b0a748"). InnerVolumeSpecName "kube-api-access-kn5xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.235716 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04d4fd48-11a1-422c-bde1-221ca8b0a748" (UID: "04d4fd48-11a1-422c-bde1-221ca8b0a748"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.276111 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kn5xw\" (UniqueName: \"kubernetes.io/projected/04d4fd48-11a1-422c-bde1-221ca8b0a748-kube-api-access-kn5xw\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.276156 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.276169 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04d4fd48-11a1-422c-bde1-221ca8b0a748-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.817633 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45zmq" event={"ID":"04d4fd48-11a1-422c-bde1-221ca8b0a748","Type":"ContainerDied","Data":"07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4"} Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.817725 4768 scope.go:117] "RemoveContainer" containerID="49c0099eecd6eb30db379a06867a122cb67adffc90da4702f4fe6b6a4216e177" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.817659 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45zmq" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.850871 4768 scope.go:117] "RemoveContainer" containerID="df6fee5843168efc4b483fbd4eefb39f99a2a304d942c1bcc6a27f80a30e1f4b" Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.853967 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.869375 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-45zmq"] Dec 03 17:05:23 crc kubenswrapper[4768]: I1203 17:05:23.888932 4768 scope.go:117] "RemoveContainer" containerID="3f2f9cb3ed310677e8e86e5b23a1bea03fdd9ead37120353688e68cadbb40310" Dec 03 17:05:25 crc kubenswrapper[4768]: I1203 17:05:25.552425 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" path="/var/lib/kubelet/pods/04d4fd48-11a1-422c-bde1-221ca8b0a748/volumes" Dec 03 17:05:26 crc kubenswrapper[4768]: E1203 17:05:26.820280 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache]" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.899767 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:29 crc kubenswrapper[4768]: E1203 17:05:29.900709 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="registry-server" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.900727 4768 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="registry-server" Dec 03 17:05:29 crc kubenswrapper[4768]: E1203 17:05:29.900787 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="extract-utilities" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.900799 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="extract-utilities" Dec 03 17:05:29 crc kubenswrapper[4768]: E1203 17:05:29.900817 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="extract-content" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.900826 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="extract-content" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.901114 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="04d4fd48-11a1-422c-bde1-221ca8b0a748" containerName="registry-server" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.903464 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:29 crc kubenswrapper[4768]: I1203 17:05:29.918212 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.018793 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.019296 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqkf4\" (UniqueName: \"kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.019331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.121613 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqkf4\" (UniqueName: \"kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.121724 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc 
kubenswrapper[4768]: I1203 17:05:30.121806 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.122445 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.122506 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.147503 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqkf4\" (UniqueName: \"kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4\") pod \"certified-operators-hpghb\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.244790 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.764026 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:30 crc kubenswrapper[4768]: I1203 17:05:30.888029 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerStarted","Data":"ca1b8f04d25b228a3377bb4ae70bdfe9658cbd31e9dbfafb7244714e5dda4325"} Dec 03 17:05:31 crc kubenswrapper[4768]: I1203 17:05:31.897985 4768 generic.go:334] "Generic (PLEG): container finished" podID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerID="0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28" exitCode=0 Dec 03 17:05:31 crc kubenswrapper[4768]: I1203 17:05:31.898032 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerDied","Data":"0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28"} Dec 03 17:05:32 crc kubenswrapper[4768]: I1203 17:05:32.926397 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerStarted","Data":"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43"} Dec 03 17:05:34 crc kubenswrapper[4768]: I1203 17:05:34.951361 4768 generic.go:334] "Generic (PLEG): container finished" podID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerID="c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43" exitCode=0 Dec 03 17:05:34 crc kubenswrapper[4768]: I1203 17:05:34.951495 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" 
event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerDied","Data":"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43"} Dec 03 17:05:35 crc kubenswrapper[4768]: I1203 17:05:35.964094 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerStarted","Data":"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766"} Dec 03 17:05:37 crc kubenswrapper[4768]: I1203 17:05:37.001840 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hpghb" podStartSLOduration=4.209261057 podStartE2EDuration="8.001818164s" podCreationTimestamp="2025-12-03 17:05:29 +0000 UTC" firstStartedPulling="2025-12-03 17:05:31.900908637 +0000 UTC m=+2828.820245060" lastFinishedPulling="2025-12-03 17:05:35.693465734 +0000 UTC m=+2832.612802167" observedRunningTime="2025-12-03 17:05:36.991995469 +0000 UTC m=+2833.911331892" watchObservedRunningTime="2025-12-03 17:05:37.001818164 +0000 UTC m=+2833.921154607" Dec 03 17:05:37 crc kubenswrapper[4768]: E1203 17:05:37.115091 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache]" Dec 03 17:05:40 crc kubenswrapper[4768]: I1203 17:05:40.244963 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:40 crc kubenswrapper[4768]: I1203 17:05:40.245420 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:40 crc kubenswrapper[4768]: I1203 17:05:40.326544 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:41 crc kubenswrapper[4768]: I1203 17:05:41.074615 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:41 crc kubenswrapper[4768]: I1203 17:05:41.139403 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.045013 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hpghb" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="registry-server" containerID="cri-o://e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766" gracePeriod=2 Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.597425 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.717494 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities\") pod \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.717576 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content\") pod \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.717870 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqkf4\" (UniqueName: \"kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4\") pod \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\" (UID: \"dbfc0fb3-3427-49f8-a898-d25d9b9896c3\") " Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.718630 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities" (OuterVolumeSpecName: "utilities") pod "dbfc0fb3-3427-49f8-a898-d25d9b9896c3" (UID: "dbfc0fb3-3427-49f8-a898-d25d9b9896c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.729790 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4" (OuterVolumeSpecName: "kube-api-access-dqkf4") pod "dbfc0fb3-3427-49f8-a898-d25d9b9896c3" (UID: "dbfc0fb3-3427-49f8-a898-d25d9b9896c3"). InnerVolumeSpecName "kube-api-access-dqkf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.771496 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dbfc0fb3-3427-49f8-a898-d25d9b9896c3" (UID: "dbfc0fb3-3427-49f8-a898-d25d9b9896c3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.820036 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqkf4\" (UniqueName: \"kubernetes.io/projected/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-kube-api-access-dqkf4\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.820086 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:43 crc kubenswrapper[4768]: I1203 17:05:43.820098 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dbfc0fb3-3427-49f8-a898-d25d9b9896c3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.060759 4768 generic.go:334] "Generic (PLEG): container finished" podID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerID="e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766" exitCode=0 Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.060814 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerDied","Data":"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766"} Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.060867 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hpghb" event={"ID":"dbfc0fb3-3427-49f8-a898-d25d9b9896c3","Type":"ContainerDied","Data":"ca1b8f04d25b228a3377bb4ae70bdfe9658cbd31e9dbfafb7244714e5dda4325"} Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.060890 4768 scope.go:117] "RemoveContainer" containerID="e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.060957 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hpghb" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.095240 4768 scope.go:117] "RemoveContainer" containerID="c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.125514 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.141318 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hpghb"] Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.153639 4768 scope.go:117] "RemoveContainer" containerID="0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.206856 4768 scope.go:117] "RemoveContainer" containerID="e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766" Dec 03 17:05:44 crc kubenswrapper[4768]: E1203 17:05:44.207835 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766\": container with ID starting with e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766 not found: ID does not exist" containerID="e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.207868 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766"} err="failed to get container status \"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766\": rpc error: code = NotFound desc = could not find container \"e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766\": container with ID starting with e862ad22bf6bfaca5cc3fb781f30e24431dc00c6f72edef3f7396cfd5ca00766 not found: ID does not exist" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.207909 4768 scope.go:117] "RemoveContainer" containerID="c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43" Dec 03 17:05:44 crc kubenswrapper[4768]: E1203 17:05:44.208198 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43\": container with ID starting with c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43 not found: ID does not exist" containerID="c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.208221 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43"} err="failed to get container status \"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43\": rpc error: code = NotFound desc = could not find container \"c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43\": container with ID starting with c968805e7b54a56ce2f8666bf9f6db3361e590025e910a94af1c83caef9e9a43 not found: ID does not exist" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.208251 4768 scope.go:117] "RemoveContainer" containerID="0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28" Dec 03 17:05:44 crc kubenswrapper[4768]: E1203 17:05:44.208924 4768 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28\": container with ID starting with 0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28 not found: ID does not exist" containerID="0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28" Dec 03 17:05:44 crc kubenswrapper[4768]: I1203 17:05:44.208967 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28"} err="failed to get container status \"0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28\": rpc error: code = NotFound desc = could not find container \"0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28\": container with ID starting with 0cb0018cb0b0395dee618b3a6d74b700252617838a30f86623e1b31fdd1b4d28 not found: ID does not exist" Dec 03 17:05:45 crc kubenswrapper[4768]: I1203 17:05:45.549137 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" path="/var/lib/kubelet/pods/dbfc0fb3-3427-49f8-a898-d25d9b9896c3/volumes" Dec 03 17:05:47 crc kubenswrapper[4768]: E1203 17:05:47.383438 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache]" Dec 03 17:05:57 crc kubenswrapper[4768]: E1203 17:05:57.693932 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache]" Dec 03 17:06:07 crc kubenswrapper[4768]: E1203 17:06:07.953322 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache]" Dec 03 17:06:18 crc kubenswrapper[4768]: E1203 17:06:18.269682 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04d4fd48_11a1_422c_bde1_221ca8b0a748.slice/crio-07b5336bc87b566715a8ce326acc9508f4b0edf7f9cdece05d68c6fc7a527bf4\": RecentStats: unable to find data in memory cache]" Dec 03 17:06:26 crc kubenswrapper[4768]: I1203 17:06:26.028443 4768 
patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:06:26 crc kubenswrapper[4768]: I1203 17:06:26.029805 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:06:53 crc kubenswrapper[4768]: I1203 17:06:53.882455 4768 generic.go:334] "Generic (PLEG): container finished" podID="2025631a-ad01-494e-a78d-095aaedfa302" containerID="cc7490c33551c7e7168660f0f0178987719e28d41ba24ba0431aabdf88c9df51" exitCode=0 Dec 03 17:06:53 crc kubenswrapper[4768]: I1203 17:06:53.882543 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" event={"ID":"2025631a-ad01-494e-a78d-095aaedfa302","Type":"ContainerDied","Data":"cc7490c33551c7e7168660f0f0178987719e28d41ba24ba0431aabdf88c9df51"} Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.414053 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.521165 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dmls\" (UniqueName: \"kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls\") pod \"2025631a-ad01-494e-a78d-095aaedfa302\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.521364 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory\") pod \"2025631a-ad01-494e-a78d-095aaedfa302\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.521405 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle\") pod \"2025631a-ad01-494e-a78d-095aaedfa302\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.521426 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key\") pod \"2025631a-ad01-494e-a78d-095aaedfa302\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.521453 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0\") pod \"2025631a-ad01-494e-a78d-095aaedfa302\" (UID: \"2025631a-ad01-494e-a78d-095aaedfa302\") " Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.527819 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls" (OuterVolumeSpecName: "kube-api-access-7dmls") pod 
"2025631a-ad01-494e-a78d-095aaedfa302" (UID: "2025631a-ad01-494e-a78d-095aaedfa302"). InnerVolumeSpecName "kube-api-access-7dmls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.528906 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "2025631a-ad01-494e-a78d-095aaedfa302" (UID: "2025631a-ad01-494e-a78d-095aaedfa302"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.560723 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2025631a-ad01-494e-a78d-095aaedfa302" (UID: "2025631a-ad01-494e-a78d-095aaedfa302"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.561240 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "2025631a-ad01-494e-a78d-095aaedfa302" (UID: "2025631a-ad01-494e-a78d-095aaedfa302"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.566379 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory" (OuterVolumeSpecName: "inventory") pod "2025631a-ad01-494e-a78d-095aaedfa302" (UID: "2025631a-ad01-494e-a78d-095aaedfa302"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.624674 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dmls\" (UniqueName: \"kubernetes.io/projected/2025631a-ad01-494e-a78d-095aaedfa302-kube-api-access-7dmls\") on node \"crc\" DevicePath \"\"" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.624717 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.624732 4768 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.624745 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.624761 4768 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/2025631a-ad01-494e-a78d-095aaedfa302-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.902235 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" event={"ID":"2025631a-ad01-494e-a78d-095aaedfa302","Type":"ContainerDied","Data":"fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8"} Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.902483 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd61e34a07e357625e7734ff5253680e07426d44620031ac4a1db15edde06fa8" Dec 03 17:06:55 crc kubenswrapper[4768]: I1203 17:06:55.902286 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.012689 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd"] Dec 03 17:06:56 crc kubenswrapper[4768]: E1203 17:06:56.013223 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="extract-content" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013248 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="extract-content" Dec 03 17:06:56 crc kubenswrapper[4768]: E1203 17:06:56.013285 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="extract-utilities" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013298 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="extract-utilities" Dec 03 17:06:56 crc kubenswrapper[4768]: E1203 17:06:56.013312 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2025631a-ad01-494e-a78d-095aaedfa302" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013321 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2025631a-ad01-494e-a78d-095aaedfa302" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 17:06:56 crc kubenswrapper[4768]: E1203 17:06:56.013349 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="registry-server" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013357 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="registry-server" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013651 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbfc0fb3-3427-49f8-a898-d25d9b9896c3" containerName="registry-server" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.013677 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2025631a-ad01-494e-a78d-095aaedfa302" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.014637 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.017205 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.017475 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.017695 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.017858 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.019135 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.019453 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.024007 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd"] Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.025063 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.028091 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.028140 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136289 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136438 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136510 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136568 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95t4c\" (UniqueName: \"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136590 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136660 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136754 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136804 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.136987 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237757 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237810 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: 
\"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237840 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237864 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237897 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95t4c\" (UniqueName: \"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237916 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.237937 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.238014 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.238041 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.238983 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.246350 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.246350 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.246379 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.246863 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.247031 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.247245 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.248013 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.264188 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95t4c\" (UniqueName: \"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c\") pod \"nova-edpm-deployment-openstack-edpm-ipam-4vmcd\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.331271 4768 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.897932 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd"] Dec 03 17:06:56 crc kubenswrapper[4768]: I1203 17:06:56.918747 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" event={"ID":"d11533b9-aa83-4403-8c50-0172908b6cc3","Type":"ContainerStarted","Data":"a9a36cbd1d5abd0c03e0ac802d0a20e46967f4f7488c449084a5c57048150227"} Dec 03 17:06:57 crc kubenswrapper[4768]: I1203 17:06:57.940184 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" event={"ID":"d11533b9-aa83-4403-8c50-0172908b6cc3","Type":"ContainerStarted","Data":"1af4b5e397e11c7e26ac64c79e02f141f956aa07e4ab0b2c6ea8f337a97537df"} Dec 03 17:06:57 crc kubenswrapper[4768]: I1203 17:06:57.973625 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" podStartSLOduration=2.508979583 podStartE2EDuration="2.973582726s" podCreationTimestamp="2025-12-03 17:06:55 +0000 UTC" firstStartedPulling="2025-12-03 17:06:56.9080257 +0000 UTC m=+2913.827362123" lastFinishedPulling="2025-12-03 17:06:57.372628843 +0000 UTC m=+2914.291965266" observedRunningTime="2025-12-03 17:06:57.966299522 +0000 UTC m=+2914.885635945" watchObservedRunningTime="2025-12-03 17:06:57.973582726 +0000 UTC m=+2914.892919159" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.028290 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.029030 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.029091 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.030273 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.030369 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" gracePeriod=600 Dec 03 17:07:26 crc kubenswrapper[4768]: E1203 17:07:26.161894 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.275436 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" exitCode=0 Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.275737 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb"} Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.275798 4768 scope.go:117] "RemoveContainer" containerID="3db9d0405ce9b05744291dfb1df566810f933a1e27a024d849353994efa77a58" Dec 03 17:07:26 crc kubenswrapper[4768]: I1203 17:07:26.276776 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:07:26 crc kubenswrapper[4768]: E1203 17:07:26.277209 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:07:38 crc kubenswrapper[4768]: I1203 17:07:38.531349 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:07:38 crc kubenswrapper[4768]: E1203 17:07:38.532214 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:07:52 crc kubenswrapper[4768]: I1203 17:07:52.531878 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:07:52 crc kubenswrapper[4768]: E1203 17:07:52.532779 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:08:04 crc kubenswrapper[4768]: I1203 17:08:04.531750 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:08:04 crc kubenswrapper[4768]: E1203 17:08:04.532530 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:08:17 crc kubenswrapper[4768]: I1203 17:08:17.533040 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:08:17 crc kubenswrapper[4768]: E1203 17:08:17.533784 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:08:32 crc kubenswrapper[4768]: I1203 17:08:32.532942 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:08:32 crc kubenswrapper[4768]: E1203 17:08:32.533794 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:08:47 crc kubenswrapper[4768]: I1203 17:08:47.532832 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:08:47 crc kubenswrapper[4768]: E1203 17:08:47.533675 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:02 crc kubenswrapper[4768]: I1203 17:09:02.531948 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:09:02 crc kubenswrapper[4768]: E1203 17:09:02.532887 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:15 crc kubenswrapper[4768]: I1203 17:09:15.531500 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:09:15 crc kubenswrapper[4768]: E1203 17:09:15.532346 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:30 crc kubenswrapper[4768]: I1203 17:09:30.531702 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:09:30 crc kubenswrapper[4768]: E1203 17:09:30.532374 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:41 crc kubenswrapper[4768]: I1203 17:09:41.532984 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:09:41 crc kubenswrapper[4768]: E1203 17:09:41.534046 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:48 crc kubenswrapper[4768]: I1203 17:09:48.754223 4768 generic.go:334] "Generic (PLEG): container finished" podID="d11533b9-aa83-4403-8c50-0172908b6cc3" containerID="1af4b5e397e11c7e26ac64c79e02f141f956aa07e4ab0b2c6ea8f337a97537df" exitCode=0 Dec 03 17:09:48 crc kubenswrapper[4768]: I1203 17:09:48.754275 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" event={"ID":"d11533b9-aa83-4403-8c50-0172908b6cc3","Type":"ContainerDied","Data":"1af4b5e397e11c7e26ac64c79e02f141f956aa07e4ab0b2c6ea8f337a97537df"} Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.310538 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388533 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388670 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388703 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388783 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388809 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.388998 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.389028 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95t4c\" (UniqueName: \"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.389092 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.389142 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0\") pod \"d11533b9-aa83-4403-8c50-0172908b6cc3\" (UID: \"d11533b9-aa83-4403-8c50-0172908b6cc3\") " Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.398512 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c" (OuterVolumeSpecName: "kube-api-access-95t4c") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "kube-api-access-95t4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.402940 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.423944 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory" (OuterVolumeSpecName: "inventory") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.428548 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.433330 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.438407 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.446590 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.455298 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-migration-ssh-key-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.474134 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "d11533b9-aa83-4403-8c50-0172908b6cc3" (UID: "d11533b9-aa83-4403-8c50-0172908b6cc3"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491090 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95t4c\" (UniqueName: \"kubernetes.io/projected/d11533b9-aa83-4403-8c50-0172908b6cc3-kube-api-access-95t4c\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491122 4768 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491134 4768 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491145 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491156 4768 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491168 4768 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491177 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491189 4768 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.491198 4768 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/d11533b9-aa83-4403-8c50-0172908b6cc3-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.773420 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" event={"ID":"d11533b9-aa83-4403-8c50-0172908b6cc3","Type":"ContainerDied","Data":"a9a36cbd1d5abd0c03e0ac802d0a20e46967f4f7488c449084a5c57048150227"} Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.773467 4768 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="a9a36cbd1d5abd0c03e0ac802d0a20e46967f4f7488c449084a5c57048150227" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.773527 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-4vmcd" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.904780 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp"] Dec 03 17:09:50 crc kubenswrapper[4768]: E1203 17:09:50.905334 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d11533b9-aa83-4403-8c50-0172908b6cc3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.905357 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="d11533b9-aa83-4403-8c50-0172908b6cc3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.905656 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="d11533b9-aa83-4403-8c50-0172908b6cc3" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.906592 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.909851 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.909979 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.910060 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.910250 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l7hrr" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.910244 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 03 17:09:50 crc kubenswrapper[4768]: I1203 17:09:50.914859 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp"] Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.000476 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t764l\" (UniqueName: \"kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.000862 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.000911 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.000944 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.000979 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.001194 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.001315 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102306 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102370 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102457 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t764l\" (UniqueName: \"kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 
17:09:51.102502 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102530 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102557 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.102580 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.105811 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.106647 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.106653 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.108152 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.109169 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.115145 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.124328 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t764l\" (UniqueName: \"kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.238864 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.831815 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp"] Dec 03 17:09:51 crc kubenswrapper[4768]: I1203 17:09:51.833792 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:09:52 crc kubenswrapper[4768]: I1203 17:09:52.531625 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:09:52 crc kubenswrapper[4768]: E1203 17:09:52.532275 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:09:52 crc kubenswrapper[4768]: I1203 17:09:52.808444 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" event={"ID":"4f7d210c-5ea0-4b66-88f7-d8830a52109c","Type":"ContainerStarted","Data":"b5b11a039b5f8007d112c630b76b155e92367078e8fbed08fc3366531877213e"} Dec 03 17:09:52 crc kubenswrapper[4768]: I1203 17:09:52.808497 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" event={"ID":"4f7d210c-5ea0-4b66-88f7-d8830a52109c","Type":"ContainerStarted","Data":"e92233053eec382065276a27df99f9573616b4783d4564c95c6d67351d166763"} Dec 03 17:09:52 crc kubenswrapper[4768]: I1203 17:09:52.833295 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" podStartSLOduration=2.346769234 podStartE2EDuration="2.833273514s" 
podCreationTimestamp="2025-12-03 17:09:50 +0000 UTC" firstStartedPulling="2025-12-03 17:09:51.833574909 +0000 UTC m=+3088.752911332" lastFinishedPulling="2025-12-03 17:09:52.320079189 +0000 UTC m=+3089.239415612" observedRunningTime="2025-12-03 17:09:52.824763396 +0000 UTC m=+3089.744099839" watchObservedRunningTime="2025-12-03 17:09:52.833273514 +0000 UTC m=+3089.752609947" Dec 03 17:10:03 crc kubenswrapper[4768]: I1203 17:10:03.540436 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:10:03 crc kubenswrapper[4768]: E1203 17:10:03.541311 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:10:18 crc kubenswrapper[4768]: I1203 17:10:18.531739 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:10:18 crc kubenswrapper[4768]: E1203 17:10:18.532588 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:10:29 crc kubenswrapper[4768]: I1203 17:10:29.532329 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:10:29 crc kubenswrapper[4768]: E1203 17:10:29.533159 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:10:43 crc kubenswrapper[4768]: I1203 17:10:43.540562 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:10:43 crc kubenswrapper[4768]: E1203 17:10:43.541459 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:10:54 crc kubenswrapper[4768]: I1203 17:10:54.531785 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:10:54 crc kubenswrapper[4768]: E1203 17:10:54.532671 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:11:09 crc kubenswrapper[4768]: I1203 17:11:09.531391 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:11:09 crc kubenswrapper[4768]: E1203 17:11:09.532224 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:11:21 crc kubenswrapper[4768]: I1203 17:11:21.532993 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:11:21 crc kubenswrapper[4768]: E1203 17:11:21.534344 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:11:35 crc kubenswrapper[4768]: I1203 17:11:35.533267 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:11:35 crc kubenswrapper[4768]: E1203 17:11:35.534715 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:11:50 crc kubenswrapper[4768]: I1203 17:11:50.531903 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:11:50 crc kubenswrapper[4768]: E1203 17:11:50.532884 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:12:05 crc kubenswrapper[4768]: I1203 17:12:05.531547 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:12:05 crc kubenswrapper[4768]: E1203 17:12:05.532301 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" 
podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:12:20 crc kubenswrapper[4768]: I1203 17:12:20.532102 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:12:20 crc kubenswrapper[4768]: E1203 17:12:20.533466 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:12:26 crc kubenswrapper[4768]: I1203 17:12:26.518342 4768 generic.go:334] "Generic (PLEG): container finished" podID="4f7d210c-5ea0-4b66-88f7-d8830a52109c" containerID="b5b11a039b5f8007d112c630b76b155e92367078e8fbed08fc3366531877213e" exitCode=0 Dec 03 17:12:26 crc kubenswrapper[4768]: I1203 17:12:26.518405 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" event={"ID":"4f7d210c-5ea0-4b66-88f7-d8830a52109c","Type":"ContainerDied","Data":"b5b11a039b5f8007d112c630b76b155e92367078e8fbed08fc3366531877213e"} Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.008059 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.053822 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.053934 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t764l\" (UniqueName: \"kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.054003 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.054037 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.054077 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.054137 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.054172 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1\") pod \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\" (UID: \"4f7d210c-5ea0-4b66-88f7-d8830a52109c\") " Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.060998 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.061286 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l" (OuterVolumeSpecName: "kube-api-access-t764l") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "kube-api-access-t764l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.089587 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.089836 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.103470 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.106771 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.113520 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory" (OuterVolumeSpecName: "inventory") pod "4f7d210c-5ea0-4b66-88f7-d8830a52109c" (UID: "4f7d210c-5ea0-4b66-88f7-d8830a52109c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157299 4768 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157348 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t764l\" (UniqueName: \"kubernetes.io/projected/4f7d210c-5ea0-4b66-88f7-d8830a52109c-kube-api-access-t764l\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157359 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157368 4768 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-inventory\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157377 4768 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157387 4768 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.157396 4768 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/4f7d210c-5ea0-4b66-88f7-d8830a52109c-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.545337 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" event={"ID":"4f7d210c-5ea0-4b66-88f7-d8830a52109c","Type":"ContainerDied","Data":"e92233053eec382065276a27df99f9573616b4783d4564c95c6d67351d166763"} Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.545383 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e92233053eec382065276a27df99f9573616b4783d4564c95c6d67351d166763" Dec 03 17:12:28 crc kubenswrapper[4768]: I1203 17:12:28.545396 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp" Dec 03 17:12:32 crc kubenswrapper[4768]: I1203 17:12:32.532575 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb" Dec 03 17:12:33 crc kubenswrapper[4768]: I1203 17:12:33.603653 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb"} Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.721988 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:40 crc kubenswrapper[4768]: E1203 17:12:40.723576 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7d210c-5ea0-4b66-88f7-d8830a52109c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.723655 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7d210c-5ea0-4b66-88f7-d8830a52109c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.724236 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7d210c-5ea0-4b66-88f7-d8830a52109c" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.728318 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.745502 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.883353 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crxm7\" (UniqueName: \"kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.883436 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.883504 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.986229 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crxm7\" (UniqueName: \"kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.986340 4768 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.986500 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.986975 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:40 crc kubenswrapper[4768]: I1203 17:12:40.987180 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:41 crc kubenswrapper[4768]: I1203 17:12:41.006249 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crxm7\" (UniqueName: \"kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7\") pod \"redhat-marketplace-8tcnz\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:41 crc kubenswrapper[4768]: I1203 17:12:41.069661 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:41 crc kubenswrapper[4768]: I1203 17:12:41.571823 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:41 crc kubenswrapper[4768]: I1203 17:12:41.692366 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerStarted","Data":"23f2996cd1163db988ad19a4307afbc4865f20bf6809aa34a19655a6b24b03d3"} Dec 03 17:12:42 crc kubenswrapper[4768]: I1203 17:12:42.706260 4768 generic.go:334] "Generic (PLEG): container finished" podID="03833a41-0dea-46c7-b560-ce552a395d42" containerID="b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473" exitCode=0 Dec 03 17:12:42 crc kubenswrapper[4768]: I1203 17:12:42.706367 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerDied","Data":"b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473"} Dec 03 17:12:43 crc kubenswrapper[4768]: I1203 17:12:43.719402 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerStarted","Data":"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add"} Dec 03 17:12:44 crc kubenswrapper[4768]: I1203 17:12:44.733813 4768 generic.go:334] "Generic (PLEG): container finished" podID="03833a41-0dea-46c7-b560-ce552a395d42" containerID="8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add" exitCode=0 Dec 03 17:12:44 crc kubenswrapper[4768]: I1203 17:12:44.733866 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerDied","Data":"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add"} Dec 03 17:12:45 crc kubenswrapper[4768]: I1203 17:12:45.746258 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerStarted","Data":"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492"} Dec 03 17:12:45 crc kubenswrapper[4768]: I1203 17:12:45.779236 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8tcnz" podStartSLOduration=3.305077238 podStartE2EDuration="5.779219022s" podCreationTimestamp="2025-12-03 17:12:40 +0000 UTC" firstStartedPulling="2025-12-03 17:12:42.709025763 +0000 UTC m=+3259.628362186" lastFinishedPulling="2025-12-03 17:12:45.183167547 +0000 UTC m=+3262.102503970" observedRunningTime="2025-12-03 17:12:45.774158806 +0000 UTC m=+3262.693495269" watchObservedRunningTime="2025-12-03 17:12:45.779219022 +0000 UTC m=+3262.698555445" Dec 03 17:12:51 crc kubenswrapper[4768]: I1203 17:12:51.070828 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:51 crc kubenswrapper[4768]: I1203 17:12:51.073834 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:51 crc kubenswrapper[4768]: I1203 17:12:51.126418 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:51 crc kubenswrapper[4768]: I1203 17:12:51.930526 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:51 crc kubenswrapper[4768]: I1203 17:12:51.993816 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:53 crc kubenswrapper[4768]: I1203 17:12:53.859522 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8tcnz" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="registry-server" containerID="cri-o://452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492" gracePeriod=2 Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.356945 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.482583 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content\") pod \"03833a41-0dea-46c7-b560-ce552a395d42\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.482944 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crxm7\" (UniqueName: \"kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7\") pod \"03833a41-0dea-46c7-b560-ce552a395d42\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.483150 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities\") pod \"03833a41-0dea-46c7-b560-ce552a395d42\" (UID: \"03833a41-0dea-46c7-b560-ce552a395d42\") " Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.483951 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities" (OuterVolumeSpecName: "utilities") pod "03833a41-0dea-46c7-b560-ce552a395d42" (UID: "03833a41-0dea-46c7-b560-ce552a395d42"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.489121 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7" (OuterVolumeSpecName: "kube-api-access-crxm7") pod "03833a41-0dea-46c7-b560-ce552a395d42" (UID: "03833a41-0dea-46c7-b560-ce552a395d42"). InnerVolumeSpecName "kube-api-access-crxm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.515731 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03833a41-0dea-46c7-b560-ce552a395d42" (UID: "03833a41-0dea-46c7-b560-ce552a395d42"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.585470 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crxm7\" (UniqueName: \"kubernetes.io/projected/03833a41-0dea-46c7-b560-ce552a395d42-kube-api-access-crxm7\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.585500 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.585510 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03833a41-0dea-46c7-b560-ce552a395d42-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.881681 4768 generic.go:334] "Generic (PLEG): container finished" podID="03833a41-0dea-46c7-b560-ce552a395d42" containerID="452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492" exitCode=0 Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.881736 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerDied","Data":"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492"} Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.881792 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8tcnz" event={"ID":"03833a41-0dea-46c7-b560-ce552a395d42","Type":"ContainerDied","Data":"23f2996cd1163db988ad19a4307afbc4865f20bf6809aa34a19655a6b24b03d3"} Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.881821 4768 scope.go:117] "RemoveContainer" containerID="452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.881821 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8tcnz" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.930440 4768 scope.go:117] "RemoveContainer" containerID="8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add" Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.938145 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.949123 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8tcnz"] Dec 03 17:12:54 crc kubenswrapper[4768]: I1203 17:12:54.965739 4768 scope.go:117] "RemoveContainer" containerID="b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.006354 4768 scope.go:117] "RemoveContainer" containerID="452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492" Dec 03 17:12:55 crc kubenswrapper[4768]: E1203 17:12:55.012967 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492\": container with ID starting with 452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492 not found: ID does not exist" containerID="452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.012997 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492"} err="failed to get container status \"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492\": rpc error: code = NotFound desc = could not find container \"452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492\": container with ID starting with 452a66a55417dbd5080941bb756d9a5a107de5971ca7807fa217b3b9025ae492 not found: ID does not exist" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.013018 4768 scope.go:117] "RemoveContainer" containerID="8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add" Dec 03 17:12:55 crc kubenswrapper[4768]: E1203 17:12:55.013632 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add\": container with ID starting with 8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add not found: ID does not exist" containerID="8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.013674 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add"} err="failed to get container status \"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add\": rpc error: code = NotFound desc = could not find container \"8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add\": container with ID starting with 8de815df7a6d8f5d816b205ed0de9a788b6736e3beae28866f23d18303430add not found: ID does not exist" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.013701 4768 scope.go:117] "RemoveContainer" containerID="b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473" Dec 03 17:12:55 crc kubenswrapper[4768]: E1203 17:12:55.013986 4768 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473\": container with ID starting with b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473 not found: ID does not exist" containerID="b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.014014 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473"} err="failed to get container status \"b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473\": rpc error: code = NotFound desc = could not find container \"b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473\": container with ID starting with b1849cf8f5abb1e7d29aeeac5bcb76f6c4e5e14c42023a2b767e8ce2d90a3473 not found: ID does not exist" Dec 03 17:12:55 crc kubenswrapper[4768]: I1203 17:12:55.545912 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03833a41-0dea-46c7-b560-ce552a395d42" path="/var/lib/kubelet/pods/03833a41-0dea-46c7-b560-ce552a395d42/volumes" Dec 03 17:13:11 crc kubenswrapper[4768]: E1203 17:13:11.231964 4768 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.38:44198->38.102.83.38:40273: write tcp 38.102.83.38:44198->38.102.83.38:40273: write: broken pipe Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.130237 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 17:14:00 crc kubenswrapper[4768]: E1203 17:14:00.131467 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="extract-content" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.131490 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="extract-content" Dec 03 17:14:00 crc kubenswrapper[4768]: E1203 17:14:00.131516 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="extract-utilities" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.131527 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="extract-utilities" Dec 03 17:14:00 crc kubenswrapper[4768]: E1203 17:14:00.131583 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="registry-server" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.131613 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="registry-server" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.131950 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="03833a41-0dea-46c7-b560-ce552a395d42" containerName="registry-server" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.133212 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.135234 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.136350 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.136825 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wc2rm" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.140267 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.156067 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.262647 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.262761 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.262810 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.262973 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.263142 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.263269 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mz2lb\" (UniqueName: \"kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.263307 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: 
\"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.263331 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.263547 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.365622 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.365689 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.365730 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.365750 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.365788 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.366206 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.366379 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.366986 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.367058 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mz2lb\" (UniqueName: \"kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.367082 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.367374 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.368057 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.368126 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.368364 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.371928 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.372469 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs\") pod 
\"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.373687 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.382734 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mz2lb\" (UniqueName: \"kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.404770 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") " pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.469065 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 03 17:14:00 crc kubenswrapper[4768]: I1203 17:14:00.962561 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 03 17:14:01 crc kubenswrapper[4768]: I1203 17:14:01.636674 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"430c4af3-d01b-4096-b87c-4adce312cb1b","Type":"ContainerStarted","Data":"35a99aeae36ae1d44ad18e0ad1cf29ada28bfc0c94e5239684f6e764876b9587"} Dec 03 17:14:39 crc kubenswrapper[4768]: E1203 17:14:39.783649 4768 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 03 17:14:39 crc kubenswrapper[4768]: E1203 17:14:39.784505 4768 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mz2lb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(430c4af3-d01b-4096-b87c-4adce312cb1b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 03 17:14:39 crc kubenswrapper[4768]: E1203 17:14:39.785736 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="430c4af3-d01b-4096-b87c-4adce312cb1b" Dec 03 17:14:40 crc kubenswrapper[4768]: E1203 17:14:40.075812 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="430c4af3-d01b-4096-b87c-4adce312cb1b" Dec 03 17:14:52 crc kubenswrapper[4768]: I1203 17:14:52.534502 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:14:52 crc kubenswrapper[4768]: I1203 17:14:52.984666 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 03 17:14:54 crc kubenswrapper[4768]: I1203 17:14:54.250886 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"430c4af3-d01b-4096-b87c-4adce312cb1b","Type":"ContainerStarted","Data":"bce380387f0492163161fbdd69715fa751e2d5ce3c9fb424523280ad1160ffab"} Dec 03 17:14:54 crc kubenswrapper[4768]: I1203 17:14:54.270340 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.256430314 podStartE2EDuration="55.270317952s" podCreationTimestamp="2025-12-03 17:13:59 +0000 UTC" firstStartedPulling="2025-12-03 17:14:00.968099022 +0000 UTC m=+3337.887435435" lastFinishedPulling="2025-12-03 17:14:52.98198666 +0000 UTC m=+3389.901323073" observedRunningTime="2025-12-03 17:14:54.265700267 +0000 UTC m=+3391.185036690" watchObservedRunningTime="2025-12-03 17:14:54.270317952 +0000 UTC m=+3391.189654385" Dec 03 17:14:56 crc kubenswrapper[4768]: I1203 17:14:56.028297 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:14:56 crc kubenswrapper[4768]: I1203 17:14:56.028845 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:14:56 crc kubenswrapper[4768]: I1203 17:14:56.965267 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:14:56 crc kubenswrapper[4768]: I1203 17:14:56.974082 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:56 crc kubenswrapper[4768]: I1203 17:14:56.985746 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.149805 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m9t5\" (UniqueName: \"kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.149917 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.150112 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.252443 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.252967 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.253063 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m9t5\" (UniqueName: \"kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.253119 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.253639 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.276051 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8m9t5\" (UniqueName: \"kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5\") pod \"redhat-operators-nbh9l\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.306818 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:14:57 crc kubenswrapper[4768]: I1203 17:14:57.838409 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:14:58 crc kubenswrapper[4768]: I1203 17:14:58.295759 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerStarted","Data":"b20e500e358513fa10e00a6e38895aeeda5c8d09f60d7375a5c8b27f06cde5e8"} Dec 03 17:14:59 crc kubenswrapper[4768]: I1203 17:14:59.308817 4768 generic.go:334] "Generic (PLEG): container finished" podID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerID="ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89" exitCode=0 Dec 03 17:14:59 crc kubenswrapper[4768]: I1203 17:14:59.308964 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerDied","Data":"ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89"} Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.156208 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4"] Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.158626 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.163336 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.163681 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.173282 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4"] Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.318936 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.319037 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht6bh\" (UniqueName: \"kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.319295 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.421170 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht6bh\" (UniqueName: \"kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.421628 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.421769 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.422894 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume\") pod 
\"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.436994 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.459878 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht6bh\" (UniqueName: \"kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh\") pod \"collect-profiles-29413035-jk4d4\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:00 crc kubenswrapper[4768]: I1203 17:15:00.501136 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:01 crc kubenswrapper[4768]: I1203 17:15:01.114931 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4"] Dec 03 17:15:01 crc kubenswrapper[4768]: W1203 17:15:01.120324 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7b07b85_d44d_43eb_8ebb_54e27f278e17.slice/crio-4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb WatchSource:0}: Error finding container 4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb: Status 404 returned error can't find the container with id 4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb Dec 03 17:15:01 crc kubenswrapper[4768]: I1203 17:15:01.330476 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" event={"ID":"f7b07b85-d44d-43eb-8ebb-54e27f278e17","Type":"ContainerStarted","Data":"4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb"} Dec 03 17:15:01 crc kubenswrapper[4768]: I1203 17:15:01.333370 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerStarted","Data":"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517"} Dec 03 17:15:02 crc kubenswrapper[4768]: I1203 17:15:02.349802 4768 generic.go:334] "Generic (PLEG): container finished" podID="f7b07b85-d44d-43eb-8ebb-54e27f278e17" containerID="3d3977ca8cb2ec44c6b0a6b891fdb502918b828e99d76facfde4f908579d196a" exitCode=0 Dec 03 17:15:02 crc kubenswrapper[4768]: I1203 17:15:02.349870 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" event={"ID":"f7b07b85-d44d-43eb-8ebb-54e27f278e17","Type":"ContainerDied","Data":"3d3977ca8cb2ec44c6b0a6b891fdb502918b828e99d76facfde4f908579d196a"} Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.769720 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.839232 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ht6bh\" (UniqueName: \"kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh\") pod \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.839443 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume\") pod \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.839575 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume\") pod \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\" (UID: \"f7b07b85-d44d-43eb-8ebb-54e27f278e17\") " Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.840153 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume" (OuterVolumeSpecName: "config-volume") pod "f7b07b85-d44d-43eb-8ebb-54e27f278e17" (UID: "f7b07b85-d44d-43eb-8ebb-54e27f278e17"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.840564 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7b07b85-d44d-43eb-8ebb-54e27f278e17-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.847990 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f7b07b85-d44d-43eb-8ebb-54e27f278e17" (UID: "f7b07b85-d44d-43eb-8ebb-54e27f278e17"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.848148 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh" (OuterVolumeSpecName: "kube-api-access-ht6bh") pod "f7b07b85-d44d-43eb-8ebb-54e27f278e17" (UID: "f7b07b85-d44d-43eb-8ebb-54e27f278e17"). InnerVolumeSpecName "kube-api-access-ht6bh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.945234 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7b07b85-d44d-43eb-8ebb-54e27f278e17-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:04 crc kubenswrapper[4768]: I1203 17:15:04.945302 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ht6bh\" (UniqueName: \"kubernetes.io/projected/f7b07b85-d44d-43eb-8ebb-54e27f278e17-kube-api-access-ht6bh\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.391565 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.391561 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413035-jk4d4" event={"ID":"f7b07b85-d44d-43eb-8ebb-54e27f278e17","Type":"ContainerDied","Data":"4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb"} Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.392213 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c3c2f0f5991e8ec3dbc0a1ccf025c45e6d9857279fcfae5a2eb75cc3104f4cb" Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.395888 4768 generic.go:334] "Generic (PLEG): container finished" podID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerID="47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517" exitCode=0 Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.395925 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerDied","Data":"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517"} Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.871906 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn"] Dec 03 17:15:05 crc kubenswrapper[4768]: I1203 17:15:05.885942 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29412990-22drn"] Dec 03 17:15:06 crc kubenswrapper[4768]: I1203 17:15:06.409729 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerStarted","Data":"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7"} Dec 03 17:15:06 crc kubenswrapper[4768]: I1203 17:15:06.436345 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nbh9l" podStartSLOduration=3.8419523140000003 podStartE2EDuration="10.436311707s" podCreationTimestamp="2025-12-03 17:14:56 +0000 UTC" firstStartedPulling="2025-12-03 17:14:59.310933247 +0000 UTC m=+3396.230269670" lastFinishedPulling="2025-12-03 17:15:05.90529265 +0000 UTC m=+3402.824629063" observedRunningTime="2025-12-03 17:15:06.433897942 +0000 UTC m=+3403.353234385" watchObservedRunningTime="2025-12-03 17:15:06.436311707 +0000 UTC m=+3403.355648130" Dec 03 17:15:07 crc kubenswrapper[4768]: I1203 17:15:07.307462 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:07 crc kubenswrapper[4768]: I1203 17:15:07.307941 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:07 crc kubenswrapper[4768]: I1203 17:15:07.543883 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7718e65-4028-4ef0-ab6c-06de3b0cab04" path="/var/lib/kubelet/pods/b7718e65-4028-4ef0-ab6c-06de3b0cab04/volumes" Dec 03 17:15:08 crc kubenswrapper[4768]: I1203 17:15:08.362867 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nbh9l" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="registry-server" probeResult="failure" output=< Dec 03 17:15:08 crc kubenswrapper[4768]: timeout: failed to connect 
service ":50051" within 1s Dec 03 17:15:08 crc kubenswrapper[4768]: > Dec 03 17:15:17 crc kubenswrapper[4768]: I1203 17:15:17.639844 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:17 crc kubenswrapper[4768]: I1203 17:15:17.692358 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:17 crc kubenswrapper[4768]: I1203 17:15:17.880440 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:15:19 crc kubenswrapper[4768]: I1203 17:15:19.601784 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nbh9l" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="registry-server" containerID="cri-o://5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7" gracePeriod=2 Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.120037 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.314332 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities\") pod \"db12e07a-22f8-46d5-a68c-29d8c35b9762\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.314509 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content\") pod \"db12e07a-22f8-46d5-a68c-29d8c35b9762\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.314567 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m9t5\" (UniqueName: \"kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5\") pod \"db12e07a-22f8-46d5-a68c-29d8c35b9762\" (UID: \"db12e07a-22f8-46d5-a68c-29d8c35b9762\") " Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.315231 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities" (OuterVolumeSpecName: "utilities") pod "db12e07a-22f8-46d5-a68c-29d8c35b9762" (UID: "db12e07a-22f8-46d5-a68c-29d8c35b9762"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.324750 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5" (OuterVolumeSpecName: "kube-api-access-8m9t5") pod "db12e07a-22f8-46d5-a68c-29d8c35b9762" (UID: "db12e07a-22f8-46d5-a68c-29d8c35b9762"). InnerVolumeSpecName "kube-api-access-8m9t5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.418055 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.418097 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m9t5\" (UniqueName: \"kubernetes.io/projected/db12e07a-22f8-46d5-a68c-29d8c35b9762-kube-api-access-8m9t5\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.419533 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db12e07a-22f8-46d5-a68c-29d8c35b9762" (UID: "db12e07a-22f8-46d5-a68c-29d8c35b9762"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.519261 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db12e07a-22f8-46d5-a68c-29d8c35b9762-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.615762 4768 generic.go:334] "Generic (PLEG): container finished" podID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerID="5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7" exitCode=0 Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.616119 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerDied","Data":"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7"} Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.616157 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nbh9l" event={"ID":"db12e07a-22f8-46d5-a68c-29d8c35b9762","Type":"ContainerDied","Data":"b20e500e358513fa10e00a6e38895aeeda5c8d09f60d7375a5c8b27f06cde5e8"} Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.616196 4768 scope.go:117] "RemoveContainer" containerID="5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.616405 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nbh9l" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.643809 4768 scope.go:117] "RemoveContainer" containerID="47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.698835 4768 scope.go:117] "RemoveContainer" containerID="ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.705134 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.717974 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nbh9l"] Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.747937 4768 scope.go:117] "RemoveContainer" containerID="5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7" Dec 03 17:15:20 crc kubenswrapper[4768]: E1203 17:15:20.748417 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7\": container with ID starting with 5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7 not found: ID does not exist" containerID="5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.748461 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7"} err="failed to get container status \"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7\": rpc error: code = NotFound desc = could not find container \"5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7\": container with ID starting with 5b5ce4df77e1a956cdc9b0e1dbf84e319e91e6437774a6a0c2f281e94ffc47d7 not found: ID does not exist" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.748513 4768 scope.go:117] "RemoveContainer" containerID="47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517" Dec 03 17:15:20 crc kubenswrapper[4768]: E1203 17:15:20.748919 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517\": container with ID starting with 47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517 not found: ID does not exist" containerID="47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.749038 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517"} err="failed to get container status \"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517\": rpc error: code = NotFound desc = could not find container \"47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517\": container with ID starting with 47363a92d4460adacdfaacaff5287b1eaadfecac09658afa626d7edb11942517 not found: ID does not exist" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.749150 4768 scope.go:117] "RemoveContainer" containerID="ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89" Dec 03 17:15:20 crc kubenswrapper[4768]: E1203 17:15:20.749564 4768 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89\": container with ID starting with ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89 not found: ID does not exist" containerID="ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89" Dec 03 17:15:20 crc kubenswrapper[4768]: I1203 17:15:20.749624 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89"} err="failed to get container status \"ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89\": rpc error: code = NotFound desc = could not find container \"ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89\": container with ID starting with ef9588103e07703ba0ac6acbb7ed1efedf9101b90b4574b84dcef9126a1b4a89 not found: ID does not exist" Dec 03 17:15:21 crc kubenswrapper[4768]: I1203 17:15:21.563209 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" path="/var/lib/kubelet/pods/db12e07a-22f8-46d5-a68c-29d8c35b9762/volumes" Dec 03 17:15:26 crc kubenswrapper[4768]: I1203 17:15:26.028267 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:15:26 crc kubenswrapper[4768]: I1203 17:15:26.029002 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.379383 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ncxs5"] Dec 03 17:15:33 crc kubenswrapper[4768]: E1203 17:15:33.380220 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="extract-utilities" Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.380233 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="extract-utilities" Dec 03 17:15:33 crc kubenswrapper[4768]: E1203 17:15:33.380248 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="registry-server" Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.380255 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="registry-server" Dec 03 17:15:33 crc kubenswrapper[4768]: E1203 17:15:33.380282 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="extract-content" Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.380288 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="extract-content" Dec 03 17:15:33 crc kubenswrapper[4768]: E1203 17:15:33.380306 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7b07b85-d44d-43eb-8ebb-54e27f278e17" containerName="collect-profiles" Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 
17:15:33.380312 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b07b85-d44d-43eb-8ebb-54e27f278e17" containerName="collect-profiles"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.380496 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="db12e07a-22f8-46d5-a68c-29d8c35b9762" containerName="registry-server"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.380513 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b07b85-d44d-43eb-8ebb-54e27f278e17" containerName="collect-profiles"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.382433 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.405609 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ncxs5"]
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.539028 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.539154 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6t7pt\" (UniqueName: \"kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.539186 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.640812 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.640894 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6t7pt\" (UniqueName: \"kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.640916 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.641693 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.641856 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.661915 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6t7pt\" (UniqueName: \"kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt\") pod \"community-operators-ncxs5\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") " pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:33 crc kubenswrapper[4768]: I1203 17:15:33.713740 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:34 crc kubenswrapper[4768]: I1203 17:15:34.299837 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ncxs5"]
Dec 03 17:15:34 crc kubenswrapper[4768]: I1203 17:15:34.798093 4768 generic.go:334] "Generic (PLEG): container finished" podID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerID="faffc3552423e373094766f5c9795c1dd7a413ba45b51108907791873ad91f5f" exitCode=0
Dec 03 17:15:34 crc kubenswrapper[4768]: I1203 17:15:34.798230 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerDied","Data":"faffc3552423e373094766f5c9795c1dd7a413ba45b51108907791873ad91f5f"}
Dec 03 17:15:34 crc kubenswrapper[4768]: I1203 17:15:34.803104 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerStarted","Data":"e465ee2e6c567f7724ddd202e95811ca81e0e9dc5a79f8cfcb5d344c959c5ce7"}
Dec 03 17:15:35 crc kubenswrapper[4768]: I1203 17:15:35.814955 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerStarted","Data":"25c3ed81640b5ad417a70c2037c7e48299d871c22e33d21ed8150da1eb1fd52c"}
Dec 03 17:15:36 crc kubenswrapper[4768]: I1203 17:15:36.828871 4768 generic.go:334] "Generic (PLEG): container finished" podID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerID="25c3ed81640b5ad417a70c2037c7e48299d871c22e33d21ed8150da1eb1fd52c" exitCode=0
Dec 03 17:15:36 crc kubenswrapper[4768]: I1203 17:15:36.829004 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerDied","Data":"25c3ed81640b5ad417a70c2037c7e48299d871c22e33d21ed8150da1eb1fd52c"}
Dec 03 17:15:37 crc kubenswrapper[4768]: I1203 17:15:37.841163 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerStarted","Data":"414086e27168d6ec191f57d7d87bfbf07d8a891443b6cd552391ee75ef55e12e"}
Dec 03 17:15:37 crc kubenswrapper[4768]: I1203 17:15:37.876061 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ncxs5" podStartSLOduration=2.2806904 podStartE2EDuration="4.876039468s" podCreationTimestamp="2025-12-03 17:15:33 +0000 UTC" firstStartedPulling="2025-12-03 17:15:34.800368712 +0000 UTC m=+3431.719705135" lastFinishedPulling="2025-12-03 17:15:37.39571778 +0000 UTC m=+3434.315054203" observedRunningTime="2025-12-03 17:15:37.867995191 +0000 UTC m=+3434.787331614" watchObservedRunningTime="2025-12-03 17:15:37.876039468 +0000 UTC m=+3434.795375891"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.193880 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.196855 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.223844 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.357164 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.357435 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwspl\" (UniqueName: \"kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.357730 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.460268 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwspl\" (UniqueName: \"kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.460431 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.460540 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.461183 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.461385 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.486188 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwspl\" (UniqueName: \"kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl\") pod \"certified-operators-5pjwq\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") " pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:38 crc kubenswrapper[4768]: I1203 17:15:38.537069 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:39 crc kubenswrapper[4768]: I1203 17:15:39.136437 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:39 crc kubenswrapper[4768]: W1203 17:15:39.145996 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6817f707_6a48_4ba8_90b1_fbf9212b6520.slice/crio-0fd80cf2dabc6f8bfb67f87b99d43d116dfb2558e0c50da1416c406372e94568 WatchSource:0}: Error finding container 0fd80cf2dabc6f8bfb67f87b99d43d116dfb2558e0c50da1416c406372e94568: Status 404 returned error can't find the container with id 0fd80cf2dabc6f8bfb67f87b99d43d116dfb2558e0c50da1416c406372e94568
Dec 03 17:15:39 crc kubenswrapper[4768]: I1203 17:15:39.874155 4768 generic.go:334] "Generic (PLEG): container finished" podID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerID="d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b" exitCode=0
Dec 03 17:15:39 crc kubenswrapper[4768]: I1203 17:15:39.874261 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerDied","Data":"d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b"}
Dec 03 17:15:39 crc kubenswrapper[4768]: I1203 17:15:39.874696 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerStarted","Data":"0fd80cf2dabc6f8bfb67f87b99d43d116dfb2558e0c50da1416c406372e94568"}
Dec 03 17:15:40 crc kubenswrapper[4768]: I1203 17:15:40.890818 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerStarted","Data":"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"}
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.714252 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.715272 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.772909 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.928731 4768 generic.go:334] "Generic (PLEG): container finished" podID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerID="ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464" exitCode=0
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.929274 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerDied","Data":"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"}
Dec 03 17:15:43 crc kubenswrapper[4768]: I1203 17:15:43.989187 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:44 crc kubenswrapper[4768]: I1203 17:15:44.948171 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerStarted","Data":"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"}
Dec 03 17:15:45 crc kubenswrapper[4768]: I1203 17:15:45.359368 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ncxs5"]
Dec 03 17:15:45 crc kubenswrapper[4768]: I1203 17:15:45.959554 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ncxs5" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="registry-server" containerID="cri-o://414086e27168d6ec191f57d7d87bfbf07d8a891443b6cd552391ee75ef55e12e" gracePeriod=2
Dec 03 17:15:46 crc kubenswrapper[4768]: I1203 17:15:46.012551 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5pjwq" podStartSLOduration=3.243980706 podStartE2EDuration="8.012532526s" podCreationTimestamp="2025-12-03 17:15:38 +0000 UTC" firstStartedPulling="2025-12-03 17:15:39.876814985 +0000 UTC m=+3436.796151408" lastFinishedPulling="2025-12-03 17:15:44.645366805 +0000 UTC m=+3441.564703228" observedRunningTime="2025-12-03 17:15:45.9964139 +0000 UTC m=+3442.915750333" watchObservedRunningTime="2025-12-03 17:15:46.012532526 +0000 UTC m=+3442.931868949"
Dec 03 17:15:46 crc kubenswrapper[4768]: I1203 17:15:46.979172 4768 generic.go:334] "Generic (PLEG): container finished" podID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerID="414086e27168d6ec191f57d7d87bfbf07d8a891443b6cd552391ee75ef55e12e" exitCode=0
Dec 03 17:15:46 crc kubenswrapper[4768]: I1203 17:15:46.979259 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerDied","Data":"414086e27168d6ec191f57d7d87bfbf07d8a891443b6cd552391ee75ef55e12e"}
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.432923 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.516493 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content\") pod \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") "
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.516785 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6t7pt\" (UniqueName: \"kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt\") pod \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") "
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.516910 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities\") pod \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\" (UID: \"cf188536-68c5-430e-aa6c-7ce6a0d50e79\") "
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.518723 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities" (OuterVolumeSpecName: "utilities") pod "cf188536-68c5-430e-aa6c-7ce6a0d50e79" (UID: "cf188536-68c5-430e-aa6c-7ce6a0d50e79"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.537012 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt" (OuterVolumeSpecName: "kube-api-access-6t7pt") pod "cf188536-68c5-430e-aa6c-7ce6a0d50e79" (UID: "cf188536-68c5-430e-aa6c-7ce6a0d50e79"). InnerVolumeSpecName "kube-api-access-6t7pt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.592946 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf188536-68c5-430e-aa6c-7ce6a0d50e79" (UID: "cf188536-68c5-430e-aa6c-7ce6a0d50e79"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.620207 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.620276 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6t7pt\" (UniqueName: \"kubernetes.io/projected/cf188536-68c5-430e-aa6c-7ce6a0d50e79-kube-api-access-6t7pt\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.620291 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf188536-68c5-430e-aa6c-7ce6a0d50e79-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.996523 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ncxs5" event={"ID":"cf188536-68c5-430e-aa6c-7ce6a0d50e79","Type":"ContainerDied","Data":"e465ee2e6c567f7724ddd202e95811ca81e0e9dc5a79f8cfcb5d344c959c5ce7"}
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.997086 4768 scope.go:117] "RemoveContainer" containerID="414086e27168d6ec191f57d7d87bfbf07d8a891443b6cd552391ee75ef55e12e"
Dec 03 17:15:47 crc kubenswrapper[4768]: I1203 17:15:47.997275 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ncxs5"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.042967 4768 scope.go:117] "RemoveContainer" containerID="25c3ed81640b5ad417a70c2037c7e48299d871c22e33d21ed8150da1eb1fd52c"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.060547 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ncxs5"]
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.070261 4768 scope.go:117] "RemoveContainer" containerID="faffc3552423e373094766f5c9795c1dd7a413ba45b51108907791873ad91f5f"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.078439 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ncxs5"]
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.451152 4768 scope.go:117] "RemoveContainer" containerID="dcb266857adde0975c5e865252d13c25fb28d7a56e4597347be77545e526ab7a"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.538047 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.538109 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:48 crc kubenswrapper[4768]: I1203 17:15:48.599920 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:49 crc kubenswrapper[4768]: I1203 17:15:49.084328 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:49 crc kubenswrapper[4768]: I1203 17:15:49.588650 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" path="/var/lib/kubelet/pods/cf188536-68c5-430e-aa6c-7ce6a0d50e79/volumes"
Dec 03 17:15:50 crc kubenswrapper[4768]: I1203 17:15:50.752801 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:51 crc kubenswrapper[4768]: I1203 17:15:51.041995 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5pjwq" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="registry-server" containerID="cri-o://8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108" gracePeriod=2
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.700303 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.763065 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content\") pod \"6817f707-6a48-4ba8-90b1-fbf9212b6520\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") "
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.763415 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities\") pod \"6817f707-6a48-4ba8-90b1-fbf9212b6520\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") "
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.763681 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwspl\" (UniqueName: \"kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl\") pod \"6817f707-6a48-4ba8-90b1-fbf9212b6520\" (UID: \"6817f707-6a48-4ba8-90b1-fbf9212b6520\") "
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.766018 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities" (OuterVolumeSpecName: "utilities") pod "6817f707-6a48-4ba8-90b1-fbf9212b6520" (UID: "6817f707-6a48-4ba8-90b1-fbf9212b6520"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.780906 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl" (OuterVolumeSpecName: "kube-api-access-nwspl") pod "6817f707-6a48-4ba8-90b1-fbf9212b6520" (UID: "6817f707-6a48-4ba8-90b1-fbf9212b6520"). InnerVolumeSpecName "kube-api-access-nwspl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.839342 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6817f707-6a48-4ba8-90b1-fbf9212b6520" (UID: "6817f707-6a48-4ba8-90b1-fbf9212b6520"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.866616 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.866649 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwspl\" (UniqueName: \"kubernetes.io/projected/6817f707-6a48-4ba8-90b1-fbf9212b6520-kube-api-access-nwspl\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:51.866661 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6817f707-6a48-4ba8-90b1-fbf9212b6520-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.058992 4768 generic.go:334] "Generic (PLEG): container finished" podID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerID="8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108" exitCode=0
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.059036 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerDied","Data":"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"}
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.059087 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5pjwq" event={"ID":"6817f707-6a48-4ba8-90b1-fbf9212b6520","Type":"ContainerDied","Data":"0fd80cf2dabc6f8bfb67f87b99d43d116dfb2558e0c50da1416c406372e94568"}
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.059110 4768 scope.go:117] "RemoveContainer" containerID="8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.059272 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5pjwq"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.093715 4768 scope.go:117] "RemoveContainer" containerID="ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.103096 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.112621 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5pjwq"]
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.125578 4768 scope.go:117] "RemoveContainer" containerID="d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.181114 4768 scope.go:117] "RemoveContainer" containerID="8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"
Dec 03 17:15:52 crc kubenswrapper[4768]: E1203 17:15:52.181917 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108\": container with ID starting with 8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108 not found: ID does not exist" containerID="8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.181959 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108"} err="failed to get container status \"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108\": rpc error: code = NotFound desc = could not find container \"8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108\": container with ID starting with 8690f4e042a84bcd2bbbaba09a6b928743dc10e43fcbf18102dcad6738eb7108 not found: ID does not exist"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.181993 4768 scope.go:117] "RemoveContainer" containerID="ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"
Dec 03 17:15:52 crc kubenswrapper[4768]: E1203 17:15:52.182358 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464\": container with ID starting with ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464 not found: ID does not exist" containerID="ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.182380 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464"} err="failed to get container status \"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464\": rpc error: code = NotFound desc = could not find container \"ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464\": container with ID starting with ee73a42643f145e9df9b0e715e852ea8336af8a29cf03aef7b5ac312d9ce6464 not found: ID does not exist"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.182396 4768 scope.go:117] "RemoveContainer" containerID="d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b"
Dec 03 17:15:52 crc kubenswrapper[4768]: E1203 17:15:52.182850 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b\": container with ID starting with d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b not found: ID does not exist" containerID="d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b"
Dec 03 17:15:52 crc kubenswrapper[4768]: I1203 17:15:52.182874 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b"} err="failed to get container status \"d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b\": rpc error: code = NotFound desc = could not find container \"d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b\": container with ID starting with d5f7e892e9ecda61e8f73688c47f79629a9a5746b4f5d5e8c6974a74b8e8319b not found: ID does not exist"
Dec 03 17:15:53 crc kubenswrapper[4768]: I1203 17:15:53.545171 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" path="/var/lib/kubelet/pods/6817f707-6a48-4ba8-90b1-fbf9212b6520/volumes"
Dec 03 17:15:56 crc kubenswrapper[4768]: I1203 17:15:56.028131 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:15:56 crc kubenswrapper[4768]: I1203 17:15:56.029164 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:15:56 crc kubenswrapper[4768]: I1203 17:15:56.029230 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv"
Dec 03 17:15:56 crc kubenswrapper[4768]: I1203 17:15:56.030295 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:15:56 crc kubenswrapper[4768]: I1203 17:15:56.030360 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb" gracePeriod=600
Dec 03 17:15:57 crc kubenswrapper[4768]: I1203 17:15:57.125302 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb" exitCode=0
Dec 03 17:15:57 crc kubenswrapper[4768]: I1203 17:15:57.125522 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb"}
Dec 03 17:15:57 crc kubenswrapper[4768]: I1203 17:15:57.125801 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"}
Dec 03 17:15:57 crc kubenswrapper[4768]: I1203 17:15:57.125835 4768 scope.go:117] "RemoveContainer" containerID="de54c3187bc0e8c89a4d9ae8f07fd3f44d5e3a6bafed6b6ccad2d630c160e4cb"
Dec 03 17:17:56 crc kubenswrapper[4768]: I1203 17:17:56.027898 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:17:56 crc kubenswrapper[4768]: I1203 17:17:56.028351 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:18:26 crc kubenswrapper[4768]: I1203 17:18:26.028952 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:18:26 crc kubenswrapper[4768]: I1203 17:18:26.029649 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.028470 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.029145 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.029199 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.030065 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.030135 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" gracePeriod=600
Dec 03 17:18:56 crc kubenswrapper[4768]: E1203 17:18:56.150500 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.552511 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" exitCode=0
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.552579 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"}
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.552923 4768 scope.go:117] "RemoveContainer" containerID="66ff691d2a6da1d5243d7e74510489b51f924821d075bd7a68e9d550b97250eb"
Dec 03 17:18:56 crc kubenswrapper[4768]: I1203 17:18:56.553785 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:18:56 crc kubenswrapper[4768]: E1203 17:18:56.554133 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:19:11 crc kubenswrapper[4768]: I1203 17:19:11.532504 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:19:11 crc kubenswrapper[4768]: E1203 17:19:11.533525 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:19:24 crc kubenswrapper[4768]: I1203 17:19:24.532960 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:19:24 crc kubenswrapper[4768]: E1203 17:19:24.533721 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:19:37 crc kubenswrapper[4768]: I1203 17:19:37.532875 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:19:37 crc kubenswrapper[4768]: E1203 17:19:37.533944 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:19:51 crc kubenswrapper[4768]: I1203 17:19:51.531648 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:19:51 crc kubenswrapper[4768]: E1203 17:19:51.532412 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:20:04 crc kubenswrapper[4768]: I1203 17:20:04.532345 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:20:04 crc kubenswrapper[4768]: E1203 17:20:04.534240 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:20:11 crc kubenswrapper[4768]: I1203 17:20:11.846936 4768 generic.go:334] "Generic (PLEG): container finished" podID="430c4af3-d01b-4096-b87c-4adce312cb1b" containerID="bce380387f0492163161fbdd69715fa751e2d5ce3c9fb424523280ad1160ffab" exitCode=0
Dec 03 17:20:11 crc kubenswrapper[4768]: I1203 17:20:11.847011 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"430c4af3-d01b-4096-b87c-4adce312cb1b","Type":"ContainerDied","Data":"bce380387f0492163161fbdd69715fa751e2d5ce3c9fb424523280ad1160ffab"}
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.485671 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654056 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654122 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654143 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654168 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654206 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654276 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654307 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654330 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mz2lb\" (UniqueName: \"kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.654385 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config\") pod \"430c4af3-d01b-4096-b87c-4adce312cb1b\" (UID: \"430c4af3-d01b-4096-b87c-4adce312cb1b\") "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.655302 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.655392 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data" (OuterVolumeSpecName: "config-data") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.662273 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb" (OuterVolumeSpecName: "kube-api-access-mz2lb") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "kube-api-access-mz2lb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.673817 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "test-operator-logs") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.697499 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.697567 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.698544 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.721062 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756736 4768 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ca-certs\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756795 4768 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756808 4768 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756823 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756838 4768 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-config-data\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756849 4768 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/430c4af3-d01b-4096-b87c-4adce312cb1b-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756860 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mz2lb\" (UniqueName: \"kubernetes.io/projected/430c4af3-d01b-4096-b87c-4adce312cb1b-kube-api-access-mz2lb\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.756871 4768 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/430c4af3-d01b-4096-b87c-4adce312cb1b-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.781744 4768 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.860153 4768 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.867952 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"430c4af3-d01b-4096-b87c-4adce312cb1b","Type":"ContainerDied","Data":"35a99aeae36ae1d44ad18e0ad1cf29ada28bfc0c94e5239684f6e764876b9587"}
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.867988 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35a99aeae36ae1d44ad18e0ad1cf29ada28bfc0c94e5239684f6e764876b9587"
Dec 03 17:20:13 crc kubenswrapper[4768]: I1203 17:20:13.868049 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 03 17:20:14 crc kubenswrapper[4768]: I1203 17:20:14.124057 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "430c4af3-d01b-4096-b87c-4adce312cb1b" (UID: "430c4af3-d01b-4096-b87c-4adce312cb1b"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:20:14 crc kubenswrapper[4768]: I1203 17:20:14.167987 4768 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/430c4af3-d01b-4096-b87c-4adce312cb1b-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Dec 03 17:20:15 crc kubenswrapper[4768]: I1203 17:20:15.531854 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:20:15 crc kubenswrapper[4768]: E1203 17:20:15.532447 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.888810 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890167 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890189 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890231 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="extract-utilities"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890241 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="extract-utilities"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890287 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890302 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890334 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="430c4af3-d01b-4096-b87c-4adce312cb1b" containerName="tempest-tests-tempest-tests-runner"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890345 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="430c4af3-d01b-4096-b87c-4adce312cb1b" containerName="tempest-tests-tempest-tests-runner"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890381 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="extract-utilities"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890390 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="extract-utilities"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890414 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="extract-content"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890424 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="extract-content"
Dec 03 17:20:17 crc kubenswrapper[4768]: E1203 17:20:17.890457 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="extract-content"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.890466 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="extract-content"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.891004 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="6817f707-6a48-4ba8-90b1-fbf9212b6520" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.891057 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf188536-68c5-430e-aa6c-7ce6a0d50e79" containerName="registry-server"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.891098 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="430c4af3-d01b-4096-b87c-4adce312cb1b" containerName="tempest-tests-tempest-tests-runner"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.894767 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.903174 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-wc2rm"
Dec 03 17:20:17 crc kubenswrapper[4768]: I1203 17:20:17.977255 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.067899 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.068074 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-642jq\" (UniqueName: \"kubernetes.io/projected/729081b2-0022-49f3-9ebc-8640c6de0a0a-kube-api-access-642jq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.169888 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.170018 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-642jq\" (UniqueName: \"kubernetes.io/projected/729081b2-0022-49f3-9ebc-8640c6de0a0a-kube-api-access-642jq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.170729 4768 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.208174 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-642jq\" (UniqueName: \"kubernetes.io/projected/729081b2-0022-49f3-9ebc-8640c6de0a0a-kube-api-access-642jq\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.212739 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"729081b2-0022-49f3-9ebc-8640c6de0a0a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.262144 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.717452 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.728202 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 03 17:20:18 crc kubenswrapper[4768]: I1203 17:20:18.960956 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"729081b2-0022-49f3-9ebc-8640c6de0a0a","Type":"ContainerStarted","Data":"2773f7b0ad54e6ec02819a26088b91097dabdb8b158c313b1c672fc3920cad43"}
Dec 03 17:20:20 crc kubenswrapper[4768]: I1203 17:20:20.984644 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"729081b2-0022-49f3-9ebc-8640c6de0a0a","Type":"ContainerStarted","Data":"48856379eafcafaf8d24bb1deba58b4f6dfb0dd5f8dd7bee78e9b63a60f0bed6"}
Dec 03 17:20:21 crc kubenswrapper[4768]: I1203 17:20:21.003092 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.553208894 podStartE2EDuration="4.003071153s" podCreationTimestamp="2025-12-03 17:20:17 +0000 UTC" firstStartedPulling="2025-12-03 17:20:18.71704523 +0000 UTC m=+3715.636381663" lastFinishedPulling="2025-12-03 17:20:20.166907499 +0000 UTC m=+3717.086243922" observedRunningTime="2025-12-03 17:20:20.997736171 +0000 UTC m=+3717.917072604" watchObservedRunningTime="2025-12-03 17:20:21.003071153 +0000 UTC m=+3717.922407586"
Need to start a new one" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.228863 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cs7bf"/"openshift-service-ca.crt" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.232938 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-cs7bf"/"kube-root-ca.crt" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.233246 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-cs7bf"/"default-dockercfg-ztvqk" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.246221 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cs7bf/must-gather-hxvg9"] Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.378503 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.378634 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gr7mm\" (UniqueName: \"kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.480513 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.480665 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gr7mm\" (UniqueName: \"kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.481523 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.506876 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gr7mm\" (UniqueName: \"kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm\") pod \"must-gather-hxvg9\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:44 crc kubenswrapper[4768]: I1203 17:20:44.565799 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:20:45 crc kubenswrapper[4768]: I1203 17:20:45.127092 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-cs7bf/must-gather-hxvg9"] Dec 03 17:20:45 crc kubenswrapper[4768]: I1203 17:20:45.251840 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" event={"ID":"bf7189f8-31ab-4b63-9511-3144128149c5","Type":"ContainerStarted","Data":"6a17bf975476bab988614b07f08088b02027bd9694cbed8faf02a44072332aa1"} Dec 03 17:20:51 crc kubenswrapper[4768]: I1203 17:20:51.324300 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" event={"ID":"bf7189f8-31ab-4b63-9511-3144128149c5","Type":"ContainerStarted","Data":"450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8"} Dec 03 17:20:51 crc kubenswrapper[4768]: I1203 17:20:51.326517 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" event={"ID":"bf7189f8-31ab-4b63-9511-3144128149c5","Type":"ContainerStarted","Data":"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686"} Dec 03 17:20:51 crc kubenswrapper[4768]: I1203 17:20:51.346224 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" podStartSLOduration=1.949821195 podStartE2EDuration="7.34620454s" podCreationTimestamp="2025-12-03 17:20:44 +0000 UTC" firstStartedPulling="2025-12-03 17:20:45.13319317 +0000 UTC m=+3742.052529593" lastFinishedPulling="2025-12-03 17:20:50.529576525 +0000 UTC m=+3747.448912938" observedRunningTime="2025-12-03 17:20:51.344178516 +0000 UTC m=+3748.263514939" watchObservedRunningTime="2025-12-03 17:20:51.34620454 +0000 UTC m=+3748.265540973" Dec 03 17:20:51 crc kubenswrapper[4768]: I1203 17:20:51.532006 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:20:51 crc kubenswrapper[4768]: E1203 17:20:51.532330 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:20:53 crc kubenswrapper[4768]: E1203 17:20:53.994711 4768 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.38:37818->38.102.83.38:40273: write tcp 38.102.83.38:37818->38.102.83.38:40273: write: broken pipe Dec 03 17:20:54 crc kubenswrapper[4768]: I1203 17:20:54.804026 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-6rlxg"] Dec 03 17:20:54 crc kubenswrapper[4768]: I1203 17:20:54.805365 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:54 crc kubenswrapper[4768]: I1203 17:20:54.938028 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:54 crc kubenswrapper[4768]: I1203 17:20:54.938388 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd5pw\" (UniqueName: \"kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.040640 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.040781 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd5pw\" (UniqueName: \"kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.040810 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.062664 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd5pw\" (UniqueName: \"kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw\") pod \"crc-debug-6rlxg\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") " pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.139461 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" Dec 03 17:20:55 crc kubenswrapper[4768]: I1203 17:20:55.363112 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" event={"ID":"3946c66d-95a3-473b-98b5-77f9cbe93eb4","Type":"ContainerStarted","Data":"2f52fab599e0ff16a0d66d6e256825c854f4f81fc329b383399e02ab052e65ae"} Dec 03 17:21:02 crc kubenswrapper[4768]: I1203 17:21:02.531543 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:21:02 crc kubenswrapper[4768]: E1203 17:21:02.532312 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:21:09 crc kubenswrapper[4768]: I1203 17:21:09.504293 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" event={"ID":"3946c66d-95a3-473b-98b5-77f9cbe93eb4","Type":"ContainerStarted","Data":"52866473b1177db8eaac7ba798385cab2035df9fe0c96ffbad5e9d87191ff516"} Dec 03 17:21:09 crc kubenswrapper[4768]: I1203 17:21:09.527164 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" podStartSLOduration=2.380406027 podStartE2EDuration="15.527143353s" podCreationTimestamp="2025-12-03 17:20:54 +0000 UTC" firstStartedPulling="2025-12-03 17:20:55.17391543 +0000 UTC m=+3752.093251853" lastFinishedPulling="2025-12-03 17:21:08.320652746 +0000 UTC m=+3765.239989179" observedRunningTime="2025-12-03 17:21:09.520029354 +0000 UTC m=+3766.439365777" watchObservedRunningTime="2025-12-03 17:21:09.527143353 +0000 UTC m=+3766.446479776" Dec 03 17:21:14 crc kubenswrapper[4768]: I1203 17:21:14.531408 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:21:14 crc kubenswrapper[4768]: E1203 17:21:14.532263 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:21:28 crc kubenswrapper[4768]: I1203 17:21:28.531396 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:21:28 crc kubenswrapper[4768]: E1203 17:21:28.532209 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:21:42 crc kubenswrapper[4768]: I1203 17:21:42.532283 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 
Dec 03 17:21:50 crc kubenswrapper[4768]: I1203 17:21:50.932956 4768 generic.go:334] "Generic (PLEG): container finished" podID="3946c66d-95a3-473b-98b5-77f9cbe93eb4" containerID="52866473b1177db8eaac7ba798385cab2035df9fe0c96ffbad5e9d87191ff516" exitCode=0
Dec 03 17:21:50 crc kubenswrapper[4768]: I1203 17:21:50.933043 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg" event={"ID":"3946c66d-95a3-473b-98b5-77f9cbe93eb4","Type":"ContainerDied","Data":"52866473b1177db8eaac7ba798385cab2035df9fe0c96ffbad5e9d87191ff516"}
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.095543 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg"
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.133635 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-6rlxg"]
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.143783 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-6rlxg"]
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.259709 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd5pw\" (UniqueName: \"kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw\") pod \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") "
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.259799 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host\") pod \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\" (UID: \"3946c66d-95a3-473b-98b5-77f9cbe93eb4\") "
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.259911 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host" (OuterVolumeSpecName: "host") pod "3946c66d-95a3-473b-98b5-77f9cbe93eb4" (UID: "3946c66d-95a3-473b-98b5-77f9cbe93eb4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.260549 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3946c66d-95a3-473b-98b5-77f9cbe93eb4-host\") on node \"crc\" DevicePath \"\""
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.268835 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw" (OuterVolumeSpecName: "kube-api-access-jd5pw") pod "3946c66d-95a3-473b-98b5-77f9cbe93eb4" (UID: "3946c66d-95a3-473b-98b5-77f9cbe93eb4"). InnerVolumeSpecName "kube-api-access-jd5pw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.364248 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd5pw\" (UniqueName: \"kubernetes.io/projected/3946c66d-95a3-473b-98b5-77f9cbe93eb4-kube-api-access-jd5pw\") on node \"crc\" DevicePath \"\""
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.952734 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f52fab599e0ff16a0d66d6e256825c854f4f81fc329b383399e02ab052e65ae"
Dec 03 17:21:52 crc kubenswrapper[4768]: I1203 17:21:52.952837 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-6rlxg"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.330348 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-c9zsm"]
Dec 03 17:21:53 crc kubenswrapper[4768]: E1203 17:21:53.331028 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3946c66d-95a3-473b-98b5-77f9cbe93eb4" containerName="container-00"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.331046 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="3946c66d-95a3-473b-98b5-77f9cbe93eb4" containerName="container-00"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.331294 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="3946c66d-95a3-473b-98b5-77f9cbe93eb4" containerName="container-00"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.332365 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.487085 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.487159 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf48r\" (UniqueName: \"kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.545712 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3946c66d-95a3-473b-98b5-77f9cbe93eb4" path="/var/lib/kubelet/pods/3946c66d-95a3-473b-98b5-77f9cbe93eb4/volumes"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.595265 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.595383 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf48r\" (UniqueName: \"kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.595388 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.616082 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf48r\" (UniqueName: \"kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r\") pod \"crc-debug-c9zsm\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") " pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.659860 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:53 crc kubenswrapper[4768]: I1203 17:21:53.963838 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm" event={"ID":"26be6db4-6056-4614-a736-d7aec92eb9e0","Type":"ContainerStarted","Data":"03067a598578c578e327482cb38ffc5d3e7d132591806b33af5b60b5ecf2d143"}
Dec 03 17:21:54 crc kubenswrapper[4768]: I1203 17:21:54.977207 4768 generic.go:334] "Generic (PLEG): container finished" podID="26be6db4-6056-4614-a736-d7aec92eb9e0" containerID="655bba876011512d01575594714fed8c2689e2488d9ee9c295668078bc3059f6" exitCode=0
Dec 03 17:21:54 crc kubenswrapper[4768]: I1203 17:21:54.977555 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm" event={"ID":"26be6db4-6056-4614-a736-d7aec92eb9e0","Type":"ContainerDied","Data":"655bba876011512d01575594714fed8c2689e2488d9ee9c295668078bc3059f6"}
Dec 03 17:21:55 crc kubenswrapper[4768]: I1203 17:21:55.532715 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:21:55 crc kubenswrapper[4768]: E1203 17:21:55.533353 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.019028 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-c9zsm"]
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.029532 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-c9zsm"]
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.114163 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.254496 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf48r\" (UniqueName: \"kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r\") pod \"26be6db4-6056-4614-a736-d7aec92eb9e0\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") "
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.254565 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host\") pod \"26be6db4-6056-4614-a736-d7aec92eb9e0\" (UID: \"26be6db4-6056-4614-a736-d7aec92eb9e0\") "
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.255094 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host" (OuterVolumeSpecName: "host") pod "26be6db4-6056-4614-a736-d7aec92eb9e0" (UID: "26be6db4-6056-4614-a736-d7aec92eb9e0"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.263235 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r" (OuterVolumeSpecName: "kube-api-access-sf48r") pod "26be6db4-6056-4614-a736-d7aec92eb9e0" (UID: "26be6db4-6056-4614-a736-d7aec92eb9e0"). InnerVolumeSpecName "kube-api-access-sf48r". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.356970 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf48r\" (UniqueName: \"kubernetes.io/projected/26be6db4-6056-4614-a736-d7aec92eb9e0-kube-api-access-sf48r\") on node \"crc\" DevicePath \"\""
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.357002 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/26be6db4-6056-4614-a736-d7aec92eb9e0-host\") on node \"crc\" DevicePath \"\""
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.997004 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03067a598578c578e327482cb38ffc5d3e7d132591806b33af5b60b5ecf2d143"
Dec 03 17:21:56 crc kubenswrapper[4768]: I1203 17:21:56.997065 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-c9zsm"
Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.232769 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-rz8xm"]
Dec 03 17:21:57 crc kubenswrapper[4768]: E1203 17:21:57.233428 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26be6db4-6056-4614-a736-d7aec92eb9e0" containerName="container-00"
Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.233440 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="26be6db4-6056-4614-a736-d7aec92eb9e0" containerName="container-00"
Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.233656 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="26be6db4-6056-4614-a736-d7aec92eb9e0" containerName="container-00"
Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.234559 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm"
Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.377541 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.377665 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbh5d\" (UniqueName: \"kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.479996 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.480047 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbh5d\" (UniqueName: \"kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.480161 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.502351 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbh5d\" (UniqueName: \"kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d\") pod \"crc-debug-rz8xm\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.542778 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26be6db4-6056-4614-a736-d7aec92eb9e0" path="/var/lib/kubelet/pods/26be6db4-6056-4614-a736-d7aec92eb9e0/volumes" Dec 03 17:21:57 crc kubenswrapper[4768]: I1203 17:21:57.560475 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:58 crc kubenswrapper[4768]: I1203 17:21:58.017714 4768 generic.go:334] "Generic (PLEG): container finished" podID="9646492b-c11e-4afa-90f1-aa7802938ccc" containerID="db6f08579fd2eebcfa0c2c1f3a3c35723ee4902aa27939e423e403a6094f8353" exitCode=0 Dec 03 17:21:58 crc kubenswrapper[4768]: I1203 17:21:58.017764 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" event={"ID":"9646492b-c11e-4afa-90f1-aa7802938ccc","Type":"ContainerDied","Data":"db6f08579fd2eebcfa0c2c1f3a3c35723ee4902aa27939e423e403a6094f8353"} Dec 03 17:21:58 crc kubenswrapper[4768]: I1203 17:21:58.018008 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" event={"ID":"9646492b-c11e-4afa-90f1-aa7802938ccc","Type":"ContainerStarted","Data":"d7c0c6bb7acfb470cee76473612e86493cc6d3575bce08b702d2e980d2929ded"} Dec 03 17:21:58 crc kubenswrapper[4768]: I1203 17:21:58.058903 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-rz8xm"] Dec 03 17:21:58 crc kubenswrapper[4768]: I1203 17:21:58.067641 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs7bf/crc-debug-rz8xm"] Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.174545 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.315294 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host\") pod \"9646492b-c11e-4afa-90f1-aa7802938ccc\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.315401 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host" (OuterVolumeSpecName: "host") pod "9646492b-c11e-4afa-90f1-aa7802938ccc" (UID: "9646492b-c11e-4afa-90f1-aa7802938ccc"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.315417 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbh5d\" (UniqueName: \"kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d\") pod \"9646492b-c11e-4afa-90f1-aa7802938ccc\" (UID: \"9646492b-c11e-4afa-90f1-aa7802938ccc\") " Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.316124 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/9646492b-c11e-4afa-90f1-aa7802938ccc-host\") on node \"crc\" DevicePath \"\"" Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.323192 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d" (OuterVolumeSpecName: "kube-api-access-vbh5d") pod "9646492b-c11e-4afa-90f1-aa7802938ccc" (UID: "9646492b-c11e-4afa-90f1-aa7802938ccc"). InnerVolumeSpecName "kube-api-access-vbh5d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.418278 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbh5d\" (UniqueName: \"kubernetes.io/projected/9646492b-c11e-4afa-90f1-aa7802938ccc-kube-api-access-vbh5d\") on node \"crc\" DevicePath \"\"" Dec 03 17:21:59 crc kubenswrapper[4768]: I1203 17:21:59.544306 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9646492b-c11e-4afa-90f1-aa7802938ccc" path="/var/lib/kubelet/pods/9646492b-c11e-4afa-90f1-aa7802938ccc/volumes" Dec 03 17:22:00 crc kubenswrapper[4768]: I1203 17:22:00.038819 4768 scope.go:117] "RemoveContainer" containerID="db6f08579fd2eebcfa0c2c1f3a3c35723ee4902aa27939e423e403a6094f8353" Dec 03 17:22:00 crc kubenswrapper[4768]: I1203 17:22:00.038861 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/crc-debug-rz8xm" Dec 03 17:22:10 crc kubenswrapper[4768]: I1203 17:22:10.531809 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:22:10 crc kubenswrapper[4768]: E1203 17:22:10.532844 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:22:24 crc kubenswrapper[4768]: I1203 17:22:24.533625 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:22:24 crc kubenswrapper[4768]: E1203 17:22:24.534512 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.243356 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/init-config-reloader/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.422053 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/init-config-reloader/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.422479 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/config-reloader/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.428365 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/alertmanager/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.649866 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-b4f5488d-vnlnp_b97311e7-d43d-44d1-b971-c8cb754c1773/barbican-api/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.654083 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-api-b4f5488d-vnlnp_b97311e7-d43d-44d1-b971-c8cb754c1773/barbican-api-log/0.log" Dec 03 17:22:33 crc kubenswrapper[4768]: I1203 17:22:33.781761 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-b57955886-dvqrj_6b2a173f-65dd-4b2f-b497-826614a4bc17/barbican-keystone-listener/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.041824 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-b57955886-dvqrj_6b2a173f-65dd-4b2f-b497-826614a4bc17/barbican-keystone-listener-log/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.095188 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-58db46799c-q4fgd_0d8d5741-c6d7-43c3-8f2e-da9817d0992b/barbican-worker/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.100569 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-58db46799c-q4fgd_0d8d5741-c6d7-43c3-8f2e-da9817d0992b/barbican-worker-log/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.260559 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4_76882892-8177-4627-a611-f9e6e75d9829/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.395126 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/ceilometer-central-agent/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.490784 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/ceilometer-notification-agent/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.542862 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/proxy-httpd/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.584520 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/sg-core/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.735403 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ed073244-61fa-4ca6-968e-e9cb0a419e4b/cinder-api-log/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.748988 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ed073244-61fa-4ca6-968e-e9cb0a419e4b/cinder-api/0.log" Dec 03 17:22:34 crc kubenswrapper[4768]: I1203 17:22:34.884589 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0378dd82-69e6-42b8-b5dd-26751ef9a0db/cinder-scheduler/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.072511 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0378dd82-69e6-42b8-b5dd-26751ef9a0db/probe/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.125405 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_e19aa1f3-5836-440b-bc7e-dfc10baf6511/cloudkitty-api-log/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.224923 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_e19aa1f3-5836-440b-bc7e-dfc10baf6511/cloudkitty-api/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.335870 4768 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_08387864-260c-4260-bf37-e878d9207c7d/loki-compactor/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.445165 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-56cd74f89f-x5fb7_fb71d5cf-3561-4f62-a0c0-980ae81ab050/loki-distributor/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.538906 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-55lmd_83f9f0ed-f17f-4e94-bcc7-5108489ea003/gateway/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.642313 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-jhn5q_b9983072-bd22-4145-a740-6f479db8e8fd/gateway/0.log" Dec 03 17:22:35 crc kubenswrapper[4768]: I1203 17:22:35.852219 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_8cec597c-1827-4712-b016-5c7cfc55c585/loki-index-gateway/0.log" Dec 03 17:22:36 crc kubenswrapper[4768]: I1203 17:22:36.441092 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_5c0ad451-c513-4f94-ac08-aaa2c7df9ae8/loki-ingester/0.log" Dec 03 17:22:36 crc kubenswrapper[4768]: I1203 17:22:36.529945 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-779849886d-xc5w8_7fed8740-2999-4b8f-bd2a-2bdfea8f03a5/loki-query-frontend/0.log" Dec 03 17:22:36 crc kubenswrapper[4768]: I1203 17:22:36.904112 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf_8258f70b-4e7b-40d0-af22-a50690f99fa0/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.188804 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-7hk85_8660b1b7-7972-4b35-a50e-010de4788792/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.358208 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-548665d79b-qvtkc_5bbec9d6-615c-4007-b056-19ead8728139/loki-querier/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.467470 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/init/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.846692 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/init/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.901868 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/dnsmasq-dns/0.log" Dec 03 17:22:37 crc kubenswrapper[4768]: I1203 17:22:37.927150 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t_91a0e247-aab8-40b9-83e3-687d7f6a5927/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.162462 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_1821ab39-b12d-4311-a67e-01840cf95a09/glance-log/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.315426 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_1821ab39-b12d-4311-a67e-01840cf95a09/glance-httpd/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.530658 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eeb2f38b-2ae6-408e-815c-5bcd14d35623/glance-httpd/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.533370 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:22:38 crc kubenswrapper[4768]: E1203 17:22:38.533878 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.557903 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eeb2f38b-2ae6-408e-815c-5bcd14d35623/glance-log/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.716054 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-lmm92_f4bca08e-ad57-49ce-8fd2-29262a619a67/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:38 crc kubenswrapper[4768]: I1203 17:22:38.935282 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-v5lgz_a709e070-9d8d-43ab-8cca-46c4ac80bda3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:39 crc kubenswrapper[4768]: I1203 17:22:39.147882 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29413021-2dppd_39c831b1-4d74-43e9-a798-a0ce0f8c9c15/keystone-cron/0.log" Dec 03 17:22:39 crc kubenswrapper[4768]: I1203 17:22:39.343047 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ed50faad-e23a-4fda-b993-1af6764ac5fb/kube-state-metrics/0.log" Dec 03 17:22:39 crc kubenswrapper[4768]: I1203 17:22:39.481505 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7c7c5849fb-krxhd_5c7cbc9b-e7e7-453c-b045-02d4f0317fff/keystone-api/0.log" Dec 03 17:22:39 crc kubenswrapper[4768]: I1203 17:22:39.488987 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp_2025631a-ad01-494e-a78d-095aaedfa302/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.031316 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_ef812a88-c111-4283-b7ba-f90f3e946eec/cloudkitty-proc/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.033523 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-644d64cc89-l6cqk_260f7230-73a0-4bec-b9c4-2805af398ab1/neutron-httpd/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.070349 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-644d64cc89-l6cqk_260f7230-73a0-4bec-b9c4-2805af398ab1/neutron-api/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.263436 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn_b4046334-7016-451c-b6d8-ad389cca206a/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.678657 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2b4e5e05-1afb-4f90-93de-6331cd92bfcf/nova-api-log/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.856302 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8/nova-cell0-conductor-conductor/0.log" Dec 03 17:22:40 crc kubenswrapper[4768]: I1203 17:22:40.891442 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2b4e5e05-1afb-4f90-93de-6331cd92bfcf/nova-api-api/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.091134 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_891b6c40-c436-4d8a-a035-c49252143ce1/nova-cell1-conductor-conductor/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.203139 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a8801603-67b7-4a04-b05e-de0651787247/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.377882 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-4vmcd_d11533b9-aa83-4403-8c50-0172908b6cc3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.520726 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4f7cf11c-aca1-42f7-a9f4-e9c7c941269b/nova-metadata-log/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.963659 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/mysql-bootstrap/0.log" Dec 03 17:22:41 crc kubenswrapper[4768]: I1203 17:22:41.995391 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_31a17195-ba31-4233-b087-f31d38ff03a7/nova-scheduler-scheduler/0.log" Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.142940 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/mysql-bootstrap/0.log" Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.242537 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/galera/0.log" Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.351937 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/mysql-bootstrap/0.log" Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.579034 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/galera/0.log" Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.634066 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/mysql-bootstrap/0.log" Dec 03 17:22:42 crc 
Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.742749 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4f7cf11c-aca1-42f7-a9f4-e9c7c941269b/nova-metadata-metadata/0.log"
Dec 03 17:22:42 crc kubenswrapper[4768]: I1203 17:22:42.886793 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_8bddf09b-660e-4615-a1c6-72d46c7c2216/openstackclient/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.042565 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-6bhgk_87225d49-4f3c-44e3-a05d-feee87a94114/ovn-controller/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.225704 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l8llw_ce67993a-adfa-412b-9c3c-37c6bb25f007/openstack-network-exporter/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.338130 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server-init/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.509127 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server-init/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.526557 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.584235 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovs-vswitchd/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.723347 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-t8vn5_0a655975-f7c9-49f9-9f76-05d58ae66f9b/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.887422 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0157f48e-0d1b-492c-8dc5-c859820905d8/openstack-network-exporter/0.log"
Dec 03 17:22:43 crc kubenswrapper[4768]: I1203 17:22:43.912095 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0157f48e-0d1b-492c-8dc5-c859820905d8/ovn-northd/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.021262 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a0d45532-8a91-4fa5-a7b5-21fdcf44160e/openstack-network-exporter/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.251856 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a0d45532-8a91-4fa5-a7b5-21fdcf44160e/ovsdbserver-nb/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.330718 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cc4d3013-515d-4eb0-a20e-735bcdbed9db/openstack-network-exporter/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.417937 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cc4d3013-515d-4eb0-a20e-735bcdbed9db/ovsdbserver-sb/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.594814 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-85896597d4-l886p_9973c84b-640a-44cb-b0e0-e8a2d47ba909/placement-api/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.678568 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-85896597d4-l886p_9973c84b-640a-44cb-b0e0-e8a2d47ba909/placement-log/0.log"
Dec 03 17:22:44 crc kubenswrapper[4768]: I1203 17:22:44.777507 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/init-config-reloader/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.080543 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/thanos-sidecar/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.094064 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/init-config-reloader/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.121124 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/prometheus/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.128367 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/config-reloader/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.358746 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/setup-container/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.554343 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/rabbitmq/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.655144 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/setup-container/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.674275 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/setup-container/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.948090 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/setup-container/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.956344 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/rabbitmq/0.log"
Dec 03 17:22:45 crc kubenswrapper[4768]: I1203 17:22:45.998683 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq_591536f4-56e5-458a-b0f5-9a4d2effd8ff/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:46 crc kubenswrapper[4768]: I1203 17:22:46.222850 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-c9hxg_2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:46 crc kubenswrapper[4768]: I1203 17:22:46.303568 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9_ab27f0c2-92c5-4271-89a0-3faef991d57e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:46 crc kubenswrapper[4768]: I1203 17:22:46.750017 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8rs7z_ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:46 crc kubenswrapper[4768]: I1203 17:22:46.786825 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-f7vnp_e7056232-6bbb-46d2-b15b-79dca6a43cb4/ssh-known-hosts-edpm-deployment/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.008205 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c4c66bd9-w8lwh_53c419ad-7c96-450d-be91-ae1598cfd390/proxy-server/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.120736 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c4c66bd9-w8lwh_53c419ad-7c96-450d-be91-ae1598cfd390/proxy-httpd/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.233834 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-rxvbr_43a9322c-e5fe-40d8-849f-dc84a5763f9c/swift-ring-rebalance/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.385565 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-reaper/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.391525 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-auditor/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.497971 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-replicator/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.542349 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-server/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.604754 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-auditor/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.647434 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-replicator/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.702825 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-server/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.785446 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-updater/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.907505 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-auditor/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.932457 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-expirer/0.log"
Dec 03 17:22:47 crc kubenswrapper[4768]: I1203 17:22:47.998346 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-replicator/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.018707 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-server/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.099902 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-updater/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.224661 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/rsync/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.301669 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/swift-recon-cron/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.369589 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp_4f7d210c-5ea0-4b66-88f7-d8830a52109c/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.543063 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_430c4af3-d01b-4096-b87c-4adce312cb1b/tempest-tests-tempest-tests-runner/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.639253 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_729081b2-0022-49f3-9ebc-8640c6de0a0a/test-operator-logs-container/0.log"
Dec 03 17:22:48 crc kubenswrapper[4768]: I1203 17:22:48.814254 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr_581e01fb-3c2a-4c39-926d-c25aebdfae5e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 03 17:22:51 crc kubenswrapper[4768]: I1203 17:22:51.361453 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c42c7f90-2ae1-4222-864c-b8f7f1733beb/memcached/0.log"
Dec 03 17:22:52 crc kubenswrapper[4768]: I1203 17:22:52.532779 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:22:52 crc kubenswrapper[4768]: E1203 17:22:52.533362 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:23:04 crc kubenswrapper[4768]: I1203 17:23:04.531478 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70"
Dec 03 17:23:04 crc kubenswrapper[4768]: E1203 17:23:04.532330 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:23:15 crc kubenswrapper[4768]: I1203 17:23:15.696873 4768 log.go:25] "Finished parsing log file"
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-g5nnn_8cb78567-ca7b-4a8b-9f94-b503727cf509/kube-rbac-proxy/0.log" Dec 03 17:23:15 crc kubenswrapper[4768]: I1203 17:23:15.699997 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-g5nnn_8cb78567-ca7b-4a8b-9f94-b503727cf509/manager/0.log" Dec 03 17:23:15 crc kubenswrapper[4768]: I1203 17:23:15.862761 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-jgz9x_0eb6c4a6-a68d-4d28-9b09-64a3dd981978/kube-rbac-proxy/0.log" Dec 03 17:23:15 crc kubenswrapper[4768]: I1203 17:23:15.981423 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-jgz9x_0eb6c4a6-a68d-4d28-9b09-64a3dd981978/manager/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.098888 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hg9tx_ff2d8ce7-0093-406f-982e-dac8b2b62593/kube-rbac-proxy/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.133268 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hg9tx_ff2d8ce7-0093-406f-982e-dac8b2b62593/manager/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.220399 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.410311 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.414910 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.416809 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.601473 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.663193 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.704361 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/extract/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.816111 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5mfx6_ac20b433-8d19-4ffc-a3d8-001ab7660cfb/kube-rbac-proxy/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: 
I1203 17:23:16.934987 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5mfx6_ac20b433-8d19-4ffc-a3d8-001ab7660cfb/manager/0.log" Dec 03 17:23:16 crc kubenswrapper[4768]: I1203 17:23:16.981291 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m97mz_085d4818-0975-441d-87fc-8c22aa78d86f/kube-rbac-proxy/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.041590 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m97mz_085d4818-0975-441d-87fc-8c22aa78d86f/manager/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.181817 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-m8lmv_f91ea1ca-d4a3-47c9-a5a8-38a78224668a/kube-rbac-proxy/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.199000 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-m8lmv_f91ea1ca-d4a3-47c9-a5a8-38a78224668a/manager/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.392531 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-vj7sm_3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1/kube-rbac-proxy/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.551702 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9q857_ad48f666-a22a-4d97-9736-5f284268bd4a/kube-rbac-proxy/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.554097 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-vj7sm_3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1/manager/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.696334 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9q857_ad48f666-a22a-4d97-9736-5f284268bd4a/manager/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.811425 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-7pfgm_dc9eedd8-2956-447b-9a21-7b71bcb0c8c4/kube-rbac-proxy/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.871855 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-7pfgm_dc9eedd8-2956-447b-9a21-7b71bcb0c8c4/manager/0.log" Dec 03 17:23:17 crc kubenswrapper[4768]: I1203 17:23:17.997486 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-zv6pv_a9dcaa43-ad02-45aa-a320-dd9d2c609bf4/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.104086 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-zv6pv_a9dcaa43-ad02-45aa-a320-dd9d2c609bf4/manager/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.177811 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9n7sr_34791f4b-32bc-44e5-90ca-ec286f96fe15/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc 
kubenswrapper[4768]: I1203 17:23:18.247595 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9n7sr_34791f4b-32bc-44e5-90ca-ec286f96fe15/manager/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.339415 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.409215 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa/manager/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.521166 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bfpmf_29a3455b-b1d4-496e-936b-348846b289e0/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.672569 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bfpmf_29a3455b-b1d4-496e-936b-348846b289e0/manager/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.685967 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cnc7h_bec968a9-b8ec-48f3-9625-96ce1f1e2dda/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.777280 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cnc7h_bec968a9-b8ec-48f3-9625-96ce1f1e2dda/manager/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.870831 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp_bae1e6ba-54bf-411a-a2b9-b79b8ff85210/kube-rbac-proxy/0.log" Dec 03 17:23:18 crc kubenswrapper[4768]: I1203 17:23:18.912034 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp_bae1e6ba-54bf-411a-a2b9-b79b8ff85210/manager/0.log" Dec 03 17:23:19 crc kubenswrapper[4768]: I1203 17:23:19.410498 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cqdlr_f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f/registry-server/0.log" Dec 03 17:23:19 crc kubenswrapper[4768]: I1203 17:23:19.458553 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-65d54995fc-qt7jc_a616a1fc-015c-4f96-ab87-cb3fe397e123/operator/0.log" Dec 03 17:23:19 crc kubenswrapper[4768]: I1203 17:23:19.537414 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:23:19 crc kubenswrapper[4768]: E1203 17:23:19.539429 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:23:19 crc kubenswrapper[4768]: I1203 17:23:19.751033 4768 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jwxs2_f6440acf-55b8-48fb-b212-550dcc9e9600/kube-rbac-proxy/0.log" Dec 03 17:23:19 crc kubenswrapper[4768]: I1203 17:23:19.827381 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jwxs2_f6440acf-55b8-48fb-b212-550dcc9e9600/manager/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.138742 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-qt8cv_982d5154-f537-4205-b268-3ce9aa7bdc37/manager/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.243029 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-qt8cv_982d5154-f537-4205-b268-3ce9aa7bdc37/kube-rbac-proxy/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.367731 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-df58498df-fdv5l_5a2dd1fe-2811-43db-959d-aceff599106d/manager/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.447897 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-vkjxc_f168e437-903a-4624-a0bc-95ea6b0e1789/operator/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.673103 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-n2qpd_347b8067-6147-477e-b00b-a5a60a29b7d8/kube-rbac-proxy/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.705011 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-n2qpd_347b8067-6147-477e-b00b-a5a60a29b7d8/manager/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.731429 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5b6647b8f8-ztfkl_f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5/kube-rbac-proxy/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.934651 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d2m42_e3af024c-b6f0-45c8-b5ab-6873b661878e/manager/0.log" Dec 03 17:23:20 crc kubenswrapper[4768]: I1203 17:23:20.952223 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d2m42_e3af024c-b6f0-45c8-b5ab-6873b661878e/kube-rbac-proxy/0.log" Dec 03 17:23:21 crc kubenswrapper[4768]: I1203 17:23:21.129417 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lg92p_051e5034-1155-4000-9d5b-96ee80ba6968/manager/0.log" Dec 03 17:23:21 crc kubenswrapper[4768]: I1203 17:23:21.169443 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lg92p_051e5034-1155-4000-9d5b-96ee80ba6968/kube-rbac-proxy/0.log" Dec 03 17:23:21 crc kubenswrapper[4768]: I1203 17:23:21.176253 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5b6647b8f8-ztfkl_f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5/manager/0.log" Dec 03 17:23:30 crc kubenswrapper[4768]: I1203 17:23:30.532276 4768 scope.go:117] 
"RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:23:30 crc kubenswrapper[4768]: E1203 17:23:30.533096 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.381193 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:40 crc kubenswrapper[4768]: E1203 17:23:40.382015 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9646492b-c11e-4afa-90f1-aa7802938ccc" containerName="container-00" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.382026 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="9646492b-c11e-4afa-90f1-aa7802938ccc" containerName="container-00" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.382246 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="9646492b-c11e-4afa-90f1-aa7802938ccc" containerName="container-00" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.389887 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.405212 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.437109 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnpv7\" (UniqueName: \"kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.437576 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.437723 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.539306 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.539390 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.539429 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnpv7\" (UniqueName: \"kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.539874 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.539891 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.557186 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnpv7\" (UniqueName: \"kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7\") pod \"redhat-marketplace-j9qp9\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:40 crc kubenswrapper[4768]: I1203 17:23:40.713184 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:41 crc kubenswrapper[4768]: I1203 17:23:41.278853 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:41 crc kubenswrapper[4768]: I1203 17:23:41.535282 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:23:41 crc kubenswrapper[4768]: E1203 17:23:41.535518 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:23:42 crc kubenswrapper[4768]: I1203 17:23:42.198563 4768 generic.go:334] "Generic (PLEG): container finished" podID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerID="75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59" exitCode=0 Dec 03 17:23:42 crc kubenswrapper[4768]: I1203 17:23:42.198801 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerDied","Data":"75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59"} Dec 03 17:23:42 crc kubenswrapper[4768]: I1203 17:23:42.198970 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerStarted","Data":"19a15d4877241ea1cfe8b8cf76662df43382e4554a597c97816fe0c6f1ded457"} Dec 03 17:23:43 crc kubenswrapper[4768]: I1203 17:23:43.211914 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerStarted","Data":"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705"} Dec 03 17:23:44 crc kubenswrapper[4768]: I1203 17:23:44.027945 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nqmfz_cadb4efb-b28b-43fc-883f-6cf96d18af72/kube-rbac-proxy/0.log" Dec 03 17:23:44 crc kubenswrapper[4768]: I1203 17:23:44.027982 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-sctcd_eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925/control-plane-machine-set-operator/0.log" Dec 03 17:23:44 crc kubenswrapper[4768]: I1203 17:23:44.109924 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nqmfz_cadb4efb-b28b-43fc-883f-6cf96d18af72/machine-api-operator/0.log" Dec 03 17:23:44 crc kubenswrapper[4768]: I1203 17:23:44.245346 4768 generic.go:334] "Generic (PLEG): container finished" podID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerID="60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705" exitCode=0 Dec 03 17:23:44 crc kubenswrapper[4768]: I1203 17:23:44.245385 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerDied","Data":"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705"} Dec 03 17:23:45 crc kubenswrapper[4768]: I1203 
17:23:45.256521 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerStarted","Data":"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295"} Dec 03 17:23:45 crc kubenswrapper[4768]: I1203 17:23:45.272902 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j9qp9" podStartSLOduration=2.831205144 podStartE2EDuration="5.272886515s" podCreationTimestamp="2025-12-03 17:23:40 +0000 UTC" firstStartedPulling="2025-12-03 17:23:42.201247413 +0000 UTC m=+3919.120583836" lastFinishedPulling="2025-12-03 17:23:44.642928774 +0000 UTC m=+3921.562265207" observedRunningTime="2025-12-03 17:23:45.272090994 +0000 UTC m=+3922.191427427" watchObservedRunningTime="2025-12-03 17:23:45.272886515 +0000 UTC m=+3922.192222938" Dec 03 17:23:50 crc kubenswrapper[4768]: I1203 17:23:50.713298 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:50 crc kubenswrapper[4768]: I1203 17:23:50.713843 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:50 crc kubenswrapper[4768]: I1203 17:23:50.769047 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:51 crc kubenswrapper[4768]: I1203 17:23:51.357838 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:51 crc kubenswrapper[4768]: I1203 17:23:51.417708 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:52 crc kubenswrapper[4768]: I1203 17:23:52.532690 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:23:52 crc kubenswrapper[4768]: E1203 17:23:52.533280 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:23:53 crc kubenswrapper[4768]: I1203 17:23:53.324106 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j9qp9" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="registry-server" containerID="cri-o://ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295" gracePeriod=2 Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.166231 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.222401 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities\") pod \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.222509 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content\") pod \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.222732 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnpv7\" (UniqueName: \"kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7\") pod \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\" (UID: \"f0a739f8-ffba-4f7f-ba8f-061bb427ea83\") " Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.224206 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities" (OuterVolumeSpecName: "utilities") pod "f0a739f8-ffba-4f7f-ba8f-061bb427ea83" (UID: "f0a739f8-ffba-4f7f-ba8f-061bb427ea83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.238837 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7" (OuterVolumeSpecName: "kube-api-access-hnpv7") pod "f0a739f8-ffba-4f7f-ba8f-061bb427ea83" (UID: "f0a739f8-ffba-4f7f-ba8f-061bb427ea83"). InnerVolumeSpecName "kube-api-access-hnpv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.248318 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f0a739f8-ffba-4f7f-ba8f-061bb427ea83" (UID: "f0a739f8-ffba-4f7f-ba8f-061bb427ea83"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.325835 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnpv7\" (UniqueName: \"kubernetes.io/projected/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-kube-api-access-hnpv7\") on node \"crc\" DevicePath \"\"" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.325878 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.325891 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f0a739f8-ffba-4f7f-ba8f-061bb427ea83-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.334312 4768 generic.go:334] "Generic (PLEG): container finished" podID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerID="ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295" exitCode=0 Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.334355 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerDied","Data":"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295"} Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.334385 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j9qp9" event={"ID":"f0a739f8-ffba-4f7f-ba8f-061bb427ea83","Type":"ContainerDied","Data":"19a15d4877241ea1cfe8b8cf76662df43382e4554a597c97816fe0c6f1ded457"} Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.334403 4768 scope.go:117] "RemoveContainer" containerID="ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.334560 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j9qp9" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.370967 4768 scope.go:117] "RemoveContainer" containerID="60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.406992 4768 scope.go:117] "RemoveContainer" containerID="75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.407021 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.438112 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j9qp9"] Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.459200 4768 scope.go:117] "RemoveContainer" containerID="ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295" Dec 03 17:23:54 crc kubenswrapper[4768]: E1203 17:23:54.460257 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295\": container with ID starting with ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295 not found: ID does not exist" containerID="ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.460297 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295"} err="failed to get container status \"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295\": rpc error: code = NotFound desc = could not find container \"ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295\": container with ID starting with ca4b5d1e08eb0e8988fd6dfbf22aeb7f9d03320d14962a750cc0f3559e8c7295 not found: ID does not exist" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.460324 4768 scope.go:117] "RemoveContainer" containerID="60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705" Dec 03 17:23:54 crc kubenswrapper[4768]: E1203 17:23:54.464809 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705\": container with ID starting with 60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705 not found: ID does not exist" containerID="60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.464850 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705"} err="failed to get container status \"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705\": rpc error: code = NotFound desc = could not find container \"60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705\": container with ID starting with 60b5d788ee33c68e2613fa4cbd7994611f1dd06062e9da19a95c7c8e2342e705 not found: ID does not exist" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.464878 4768 scope.go:117] "RemoveContainer" containerID="75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59" Dec 03 17:23:54 crc kubenswrapper[4768]: E1203 17:23:54.465133 4768 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59\": container with ID starting with 75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59 not found: ID does not exist" containerID="75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59" Dec 03 17:23:54 crc kubenswrapper[4768]: I1203 17:23:54.465156 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59"} err="failed to get container status \"75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59\": rpc error: code = NotFound desc = could not find container \"75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59\": container with ID starting with 75d1127f4c16b0b34b8f519ce27fea34a8ea384caf29ca40bf7011b4e7ca7f59 not found: ID does not exist" Dec 03 17:23:55 crc kubenswrapper[4768]: I1203 17:23:55.541453 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" path="/var/lib/kubelet/pods/f0a739f8-ffba-4f7f-ba8f-061bb427ea83/volumes" Dec 03 17:23:59 crc kubenswrapper[4768]: I1203 17:23:59.642235 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-btgph_27f311f3-af6d-45b4-8e9f-b4437d56350c/cert-manager-controller/0.log" Dec 03 17:23:59 crc kubenswrapper[4768]: I1203 17:23:59.856291 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4l62p_ae9d8259-9e85-404b-8a1e-909147ffb4a7/cert-manager-webhook/0.log" Dec 03 17:23:59 crc kubenswrapper[4768]: I1203 17:23:59.903540 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-k2zl7_540cd9b8-af71-4d61-bdab-50850c4eec6d/cert-manager-cainjector/0.log" Dec 03 17:24:06 crc kubenswrapper[4768]: I1203 17:24:06.531930 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:24:07 crc kubenswrapper[4768]: I1203 17:24:07.465055 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae"} Dec 03 17:24:14 crc kubenswrapper[4768]: I1203 17:24:14.713747 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-79jcg_fee7ecf6-cf74-41de-b6f7-16e83ab2cd84/nmstate-console-plugin/0.log" Dec 03 17:24:14 crc kubenswrapper[4768]: I1203 17:24:14.827812 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-v6x5p_6aff85f3-8f5e-44d9-be27-1bc63b1d8a38/nmstate-handler/0.log" Dec 03 17:24:14 crc kubenswrapper[4768]: I1203 17:24:14.914896 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ggfpn_3f27fdbd-f4fd-462c-9931-deb08bc97037/kube-rbac-proxy/0.log" Dec 03 17:24:14 crc kubenswrapper[4768]: I1203 17:24:14.917990 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ggfpn_3f27fdbd-f4fd-462c-9931-deb08bc97037/nmstate-metrics/0.log" Dec 03 17:24:15 crc kubenswrapper[4768]: I1203 17:24:15.101754 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-727lm_d339ee13-d547-4fa6-a7bb-17eabe43d15c/nmstate-webhook/0.log" Dec 03 17:24:15 crc kubenswrapper[4768]: I1203 17:24:15.143418 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-b4qp7_e4518900-d9d4-4ffd-a217-d8506b6d3027/nmstate-operator/0.log" Dec 03 17:24:31 crc kubenswrapper[4768]: I1203 17:24:31.075046 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/kube-rbac-proxy/0.log" Dec 03 17:24:31 crc kubenswrapper[4768]: I1203 17:24:31.113044 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/manager/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.444952 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-sbbq7_8bb39058-0f85-42fe-884e-f7ea6e389a1e/kube-rbac-proxy/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.469292 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-sbbq7_8bb39058-0f85-42fe-884e-f7ea6e389a1e/controller/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.705262 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.897322 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.930723 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.931541 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:24:47 crc kubenswrapper[4768]: I1203 17:24:47.954253 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.141063 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.228569 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.232316 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.245774 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.372662 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.434106 4768 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.461158 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/controller/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.470770 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.740926 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/kube-rbac-proxy/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.803946 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/frr-metrics/0.log" Dec 03 17:24:48 crc kubenswrapper[4768]: I1203 17:24:48.845344 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/kube-rbac-proxy-frr/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.050779 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/reloader/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.088761 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-nlk8z_3eb4dbc3-d0f3-42bd-8d09-0af1ae304716/frr-k8s-webhook-server/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.380142 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-57f5c9498-vdmjc_7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3/manager/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.512631 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-98db5c7f-g87m7_6fb7770c-b85a-4bd3-9f49-dedffaeae0e3/webhook-server/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.829341 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-79sgk_6c148908-2f51-41ee-adb8-bfd5cb821ebf/kube-rbac-proxy/0.log" Dec 03 17:24:49 crc kubenswrapper[4768]: I1203 17:24:49.988290 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/frr/0.log" Dec 03 17:24:50 crc kubenswrapper[4768]: I1203 17:24:50.228374 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-79sgk_6c148908-2f51-41ee-adb8-bfd5cb821ebf/speaker/0.log" Dec 03 17:25:05 crc kubenswrapper[4768]: I1203 17:25:05.604487 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:25:05 crc kubenswrapper[4768]: I1203 17:25:05.838046 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:25:05 crc kubenswrapper[4768]: I1203 17:25:05.859025 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:25:05 crc kubenswrapper[4768]: I1203 17:25:05.886099 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:25:06 crc kubenswrapper[4768]: I1203 17:25:06.136548 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:25:06 crc kubenswrapper[4768]: I1203 17:25:06.177023 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:25:06 crc kubenswrapper[4768]: I1203 17:25:06.186956 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/extract/0.log" Dec 03 17:25:06 crc kubenswrapper[4768]: I1203 17:25:06.876948 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.076063 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.084456 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.084609 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.253914 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.260445 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.322949 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/extract/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.473968 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.710008 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.711627 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.769568 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.962527 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.988620 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/extract/0.log" Dec 03 17:25:07 crc kubenswrapper[4768]: I1203 17:25:07.989084 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.118045 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.320762 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.339385 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.345169 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.562165 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/extract/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.656508 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.667436 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.805699 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:25:08 crc kubenswrapper[4768]: I1203 17:25:08.992647 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.007902 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.019280 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.166383 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.226872 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.235640 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/extract/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.236841 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.474542 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.488817 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.504675 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.742369 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.781711 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:25:09 crc kubenswrapper[4768]: I1203 17:25:09.914415 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.181428 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.218182 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.236935 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.383900 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.426190 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.457573 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/registry-server/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.640745 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-9jljw_60d2c487-bb7a-43ee-a699-906a81e5627d/marketplace-operator/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.695492 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/registry-server/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.775520 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.953915 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:25:10 crc kubenswrapper[4768]: I1203 17:25:10.954346 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.007019 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.209348 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.222661 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.350086 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.427448 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/registry-server/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.584523 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.598065 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.603347 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.778020 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:25:11 crc kubenswrapper[4768]: I1203 17:25:11.784275 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:25:12 crc kubenswrapper[4768]: I1203 17:25:12.488921 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/registry-server/0.log" Dec 03 17:25:27 crc kubenswrapper[4768]: I1203 17:25:27.432974 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-cgfsr_48a3a666-c857-4b4f-858c-43bc2f9d6f08/prometheus-operator/0.log" Dec 03 17:25:27 crc kubenswrapper[4768]: I1203 17:25:27.558141 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_286fed40-67a8-4eab-9ca8-3c7609503df1/prometheus-operator-admission-webhook/0.log" Dec 03 17:25:27 crc kubenswrapper[4768]: I1203 17:25:27.637876 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_cff0ca88-1474-46c7-b046-cec35a7d2409/prometheus-operator-admission-webhook/0.log" Dec 03 17:25:27 crc kubenswrapper[4768]: I1203 17:25:27.808728 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-7wglg_37340e28-1544-4f32-aed0-4c1d277cbf95/operator/0.log" Dec 03 17:25:27 crc kubenswrapper[4768]: I1203 17:25:27.862716 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-hrj6l_cebba08d-4a33-458a-9893-e717d6359f90/perses-operator/0.log" Dec 03 17:25:43 crc kubenswrapper[4768]: I1203 17:25:43.381339 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/kube-rbac-proxy/0.log" Dec 03 17:25:43 crc kubenswrapper[4768]: I1203 17:25:43.416422 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/manager/0.log" Dec 03 17:26:26 crc kubenswrapper[4768]: I1203 17:26:26.028404 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:26:26 crc kubenswrapper[4768]: I1203 17:26:26.029057 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:26:56 crc kubenswrapper[4768]: I1203 17:26:56.028374 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:26:56 crc kubenswrapper[4768]: I1203 17:26:56.029001 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.028328 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.028893 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.028950 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.030103 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.030176 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae" gracePeriod=600 Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.523716 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae" exitCode=0 Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.523980 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" 
event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae"} Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.524050 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76"} Dec 03 17:27:26 crc kubenswrapper[4768]: I1203 17:27:26.524071 4768 scope.go:117] "RemoveContainer" containerID="1b84dfe475e90d9d34a5b531e8e7fbab8869d0c985830de7387b0f63897c4d70" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.180791 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:27:37 crc kubenswrapper[4768]: E1203 17:27:37.181630 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="extract-content" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.181643 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="extract-content" Dec 03 17:27:37 crc kubenswrapper[4768]: E1203 17:27:37.181659 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="extract-utilities" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.181666 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="extract-utilities" Dec 03 17:27:37 crc kubenswrapper[4768]: E1203 17:27:37.181696 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="registry-server" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.181702 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="registry-server" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.181879 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0a739f8-ffba-4f7f-ba8f-061bb427ea83" containerName="registry-server" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.183329 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.197560 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.320526 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.321314 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.321628 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndrm6\" (UniqueName: \"kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.423851 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.423940 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndrm6\" (UniqueName: \"kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.423998 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.424530 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.424795 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.475989 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ndrm6\" (UniqueName: \"kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6\") pod \"redhat-operators-l88jw\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.522339 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.781184 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.790528 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.810662 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.939421 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.939490 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:37 crc kubenswrapper[4768]: I1203 17:27:37.940043 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpbls\" (UniqueName: \"kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.041788 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.041853 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.041932 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpbls\" (UniqueName: \"kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.043028 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.043262 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.072958 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpbls\" (UniqueName: \"kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls\") pod \"community-operators-ghjd5\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:38 crc kubenswrapper[4768]: I1203 17:27:38.126141 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.185688 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.244040 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.601559 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.604827 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.641385 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.701462 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerStarted","Data":"d52ba1912ad65123bd8d68d8b092ba33263e66d5ab21c88830eef7a49b4eb0a8"} Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.726341 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m482c\" (UniqueName: \"kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.732132 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.732237 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.764558 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerStarted","Data":"bd18b90387a20d79632b916434ca159c3bf3b7de41415957c8fc49572b3e033a"} Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.836818 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.837423 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.837557 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.837966 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m482c\" (UniqueName: 
\"kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.838167 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:39 crc kubenswrapper[4768]: I1203 17:27:39.862699 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m482c\" (UniqueName: \"kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c\") pod \"certified-operators-46f7q\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.078152 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.718289 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.781532 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerStarted","Data":"7c8ef9e4bb60caa45db4262833f0e60edd03e80ca4ae77953a2921ca966365c0"} Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.787351 4768 generic.go:334] "Generic (PLEG): container finished" podID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerID="965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a" exitCode=0 Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.787443 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerDied","Data":"965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a"} Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.790392 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.792714 4768 generic.go:334] "Generic (PLEG): container finished" podID="18e5b359-8d30-42b2-87ba-08526cae688f" containerID="629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe" exitCode=0 Dec 03 17:27:40 crc kubenswrapper[4768]: I1203 17:27:40.792765 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerDied","Data":"629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe"} Dec 03 17:27:41 crc kubenswrapper[4768]: I1203 17:27:41.805271 4768 generic.go:334] "Generic (PLEG): container finished" podID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerID="f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d" exitCode=0 Dec 03 17:27:41 crc kubenswrapper[4768]: I1203 17:27:41.805376 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" 
event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerDied","Data":"f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d"} Dec 03 17:27:42 crc kubenswrapper[4768]: I1203 17:27:42.824827 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerStarted","Data":"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a"} Dec 03 17:27:42 crc kubenswrapper[4768]: I1203 17:27:42.831513 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerStarted","Data":"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888"} Dec 03 17:27:43 crc kubenswrapper[4768]: I1203 17:27:43.853876 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerStarted","Data":"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb"} Dec 03 17:27:45 crc kubenswrapper[4768]: I1203 17:27:45.900653 4768 generic.go:334] "Generic (PLEG): container finished" podID="18e5b359-8d30-42b2-87ba-08526cae688f" containerID="df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888" exitCode=0 Dec 03 17:27:45 crc kubenswrapper[4768]: I1203 17:27:45.900741 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerDied","Data":"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888"} Dec 03 17:27:47 crc kubenswrapper[4768]: I1203 17:27:47.928754 4768 generic.go:334] "Generic (PLEG): container finished" podID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerID="ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb" exitCode=0 Dec 03 17:27:47 crc kubenswrapper[4768]: I1203 17:27:47.928871 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerDied","Data":"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb"} Dec 03 17:27:48 crc kubenswrapper[4768]: I1203 17:27:48.854225 4768 scope.go:117] "RemoveContainer" containerID="52866473b1177db8eaac7ba798385cab2035df9fe0c96ffbad5e9d87191ff516" Dec 03 17:27:49 crc kubenswrapper[4768]: I1203 17:27:49.957473 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerStarted","Data":"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d"} Dec 03 17:27:49 crc kubenswrapper[4768]: I1203 17:27:49.960910 4768 generic.go:334] "Generic (PLEG): container finished" podID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerID="28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a" exitCode=0 Dec 03 17:27:49 crc kubenswrapper[4768]: I1203 17:27:49.960987 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerDied","Data":"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a"} Dec 03 17:27:49 crc kubenswrapper[4768]: I1203 17:27:49.967584 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" 
event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerStarted","Data":"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b"} Dec 03 17:27:50 crc kubenswrapper[4768]: I1203 17:27:50.005485 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-46f7q" podStartSLOduration=3.598988366 podStartE2EDuration="11.005453533s" podCreationTimestamp="2025-12-03 17:27:39 +0000 UTC" firstStartedPulling="2025-12-03 17:27:41.830255747 +0000 UTC m=+4158.749592170" lastFinishedPulling="2025-12-03 17:27:49.236720904 +0000 UTC m=+4166.156057337" observedRunningTime="2025-12-03 17:27:50.000021646 +0000 UTC m=+4166.919358069" watchObservedRunningTime="2025-12-03 17:27:50.005453533 +0000 UTC m=+4166.924789956" Dec 03 17:27:50 crc kubenswrapper[4768]: I1203 17:27:50.079480 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:50 crc kubenswrapper[4768]: I1203 17:27:50.079857 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:27:50 crc kubenswrapper[4768]: I1203 17:27:50.081797 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ghjd5" podStartSLOduration=5.180339603 podStartE2EDuration="13.081769497s" podCreationTimestamp="2025-12-03 17:27:37 +0000 UTC" firstStartedPulling="2025-12-03 17:27:40.794735366 +0000 UTC m=+4157.714071789" lastFinishedPulling="2025-12-03 17:27:48.69616527 +0000 UTC m=+4165.615501683" observedRunningTime="2025-12-03 17:27:50.07111479 +0000 UTC m=+4166.990451233" watchObservedRunningTime="2025-12-03 17:27:50.081769497 +0000 UTC m=+4167.001105920" Dec 03 17:27:50 crc kubenswrapper[4768]: I1203 17:27:50.985097 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerStarted","Data":"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59"} Dec 03 17:27:51 crc kubenswrapper[4768]: I1203 17:27:51.031862 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-l88jw" podStartSLOduration=4.346308656 podStartE2EDuration="14.031832207s" podCreationTimestamp="2025-12-03 17:27:37 +0000 UTC" firstStartedPulling="2025-12-03 17:27:40.789909936 +0000 UTC m=+4157.709246359" lastFinishedPulling="2025-12-03 17:27:50.475433497 +0000 UTC m=+4167.394769910" observedRunningTime="2025-12-03 17:27:51.016167675 +0000 UTC m=+4167.935504098" watchObservedRunningTime="2025-12-03 17:27:51.031832207 +0000 UTC m=+4167.951168630" Dec 03 17:27:51 crc kubenswrapper[4768]: I1203 17:27:51.179684 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-46f7q" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="registry-server" probeResult="failure" output=< Dec 03 17:27:51 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 17:27:51 crc kubenswrapper[4768]: > Dec 03 17:27:53 crc kubenswrapper[4768]: I1203 17:27:53.013627 4768 generic.go:334] "Generic (PLEG): container finished" podID="bf7189f8-31ab-4b63-9511-3144128149c5" containerID="4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686" exitCode=0 Dec 03 17:27:53 crc kubenswrapper[4768]: I1203 17:27:53.013729 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-must-gather-cs7bf/must-gather-hxvg9" event={"ID":"bf7189f8-31ab-4b63-9511-3144128149c5","Type":"ContainerDied","Data":"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686"} Dec 03 17:27:53 crc kubenswrapper[4768]: I1203 17:27:53.015625 4768 scope.go:117] "RemoveContainer" containerID="4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686" Dec 03 17:27:53 crc kubenswrapper[4768]: I1203 17:27:53.850284 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs7bf_must-gather-hxvg9_bf7189f8-31ab-4b63-9511-3144128149c5/gather/0.log" Dec 03 17:27:57 crc kubenswrapper[4768]: I1203 17:27:57.523357 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:57 crc kubenswrapper[4768]: I1203 17:27:57.524017 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:27:58 crc kubenswrapper[4768]: I1203 17:27:58.126666 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:58 crc kubenswrapper[4768]: I1203 17:27:58.127007 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:58 crc kubenswrapper[4768]: I1203 17:27:58.202938 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:58 crc kubenswrapper[4768]: I1203 17:27:58.581643 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-l88jw" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="registry-server" probeResult="failure" output=< Dec 03 17:27:58 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 17:27:58 crc kubenswrapper[4768]: > Dec 03 17:27:59 crc kubenswrapper[4768]: I1203 17:27:59.144213 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:27:59 crc kubenswrapper[4768]: I1203 17:27:59.224573 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:28:00 crc kubenswrapper[4768]: I1203 17:28:00.139752 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:28:00 crc kubenswrapper[4768]: I1203 17:28:00.208003 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:28:00 crc kubenswrapper[4768]: I1203 17:28:00.858056 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:28:01 crc kubenswrapper[4768]: I1203 17:28:01.111759 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ghjd5" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="registry-server" containerID="cri-o://c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b" gracePeriod=2 Dec 03 17:28:01 crc kubenswrapper[4768]: I1203 17:28:01.941545 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.034897 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content\") pod \"18e5b359-8d30-42b2-87ba-08526cae688f\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.035036 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpbls\" (UniqueName: \"kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls\") pod \"18e5b359-8d30-42b2-87ba-08526cae688f\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.035260 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities\") pod \"18e5b359-8d30-42b2-87ba-08526cae688f\" (UID: \"18e5b359-8d30-42b2-87ba-08526cae688f\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.035947 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities" (OuterVolumeSpecName: "utilities") pod "18e5b359-8d30-42b2-87ba-08526cae688f" (UID: "18e5b359-8d30-42b2-87ba-08526cae688f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.037282 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.062449 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls" (OuterVolumeSpecName: "kube-api-access-hpbls") pod "18e5b359-8d30-42b2-87ba-08526cae688f" (UID: "18e5b359-8d30-42b2-87ba-08526cae688f"). InnerVolumeSpecName "kube-api-access-hpbls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.099733 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18e5b359-8d30-42b2-87ba-08526cae688f" (UID: "18e5b359-8d30-42b2-87ba-08526cae688f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.127323 4768 generic.go:334] "Generic (PLEG): container finished" podID="18e5b359-8d30-42b2-87ba-08526cae688f" containerID="c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b" exitCode=0 Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.127422 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerDied","Data":"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b"} Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.127506 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ghjd5" event={"ID":"18e5b359-8d30-42b2-87ba-08526cae688f","Type":"ContainerDied","Data":"bd18b90387a20d79632b916434ca159c3bf3b7de41415957c8fc49572b3e033a"} Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.127481 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ghjd5" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.127533 4768 scope.go:117] "RemoveContainer" containerID="c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.128050 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-46f7q" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="registry-server" containerID="cri-o://d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d" gracePeriod=2 Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.143341 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18e5b359-8d30-42b2-87ba-08526cae688f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.143386 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpbls\" (UniqueName: \"kubernetes.io/projected/18e5b359-8d30-42b2-87ba-08526cae688f-kube-api-access-hpbls\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.164770 4768 scope.go:117] "RemoveContainer" containerID="df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.165588 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.184157 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ghjd5"] Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.189758 4768 scope.go:117] "RemoveContainer" containerID="629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.382419 4768 scope.go:117] "RemoveContainer" containerID="c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b" Dec 03 17:28:02 crc kubenswrapper[4768]: E1203 17:28:02.383229 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b\": container with ID starting with c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b not found: ID does not exist" 
containerID="c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.383306 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b"} err="failed to get container status \"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b\": rpc error: code = NotFound desc = could not find container \"c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b\": container with ID starting with c5e213d8c7fa53065557adab36c9e782dfc1a8b5623ed98bd989a7e652255c0b not found: ID does not exist" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.383356 4768 scope.go:117] "RemoveContainer" containerID="df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888" Dec 03 17:28:02 crc kubenswrapper[4768]: E1203 17:28:02.384139 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888\": container with ID starting with df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888 not found: ID does not exist" containerID="df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.384188 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888"} err="failed to get container status \"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888\": rpc error: code = NotFound desc = could not find container \"df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888\": container with ID starting with df25061368105fbece2b409c25e349eacc7fa2d6d01caca26292fc11a60ec888 not found: ID does not exist" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.384224 4768 scope.go:117] "RemoveContainer" containerID="629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe" Dec 03 17:28:02 crc kubenswrapper[4768]: E1203 17:28:02.384646 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe\": container with ID starting with 629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe not found: ID does not exist" containerID="629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.384673 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe"} err="failed to get container status \"629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe\": rpc error: code = NotFound desc = could not find container \"629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe\": container with ID starting with 629ec27af5461aae0fbf4aa2ea0a2e2e36b9d9d2fc2c1645767ab5d1ec327dfe not found: ID does not exist" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.832747 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.963797 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities\") pod \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.964123 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m482c\" (UniqueName: \"kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c\") pod \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.964213 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content\") pod \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\" (UID: \"546377ca-f06b-4df1-ad4a-3f066d1a7ee5\") " Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.965226 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities" (OuterVolumeSpecName: "utilities") pod "546377ca-f06b-4df1-ad4a-3f066d1a7ee5" (UID: "546377ca-f06b-4df1-ad4a-3f066d1a7ee5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:02 crc kubenswrapper[4768]: I1203 17:28:02.972806 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c" (OuterVolumeSpecName: "kube-api-access-m482c") pod "546377ca-f06b-4df1-ad4a-3f066d1a7ee5" (UID: "546377ca-f06b-4df1-ad4a-3f066d1a7ee5"). InnerVolumeSpecName "kube-api-access-m482c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.042785 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "546377ca-f06b-4df1-ad4a-3f066d1a7ee5" (UID: "546377ca-f06b-4df1-ad4a-3f066d1a7ee5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.068614 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m482c\" (UniqueName: \"kubernetes.io/projected/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-kube-api-access-m482c\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.068668 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.068684 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/546377ca-f06b-4df1-ad4a-3f066d1a7ee5-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.158349 4768 generic.go:334] "Generic (PLEG): container finished" podID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerID="d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d" exitCode=0 Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.158514 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-46f7q" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.158577 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerDied","Data":"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d"} Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.158677 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46f7q" event={"ID":"546377ca-f06b-4df1-ad4a-3f066d1a7ee5","Type":"ContainerDied","Data":"7c8ef9e4bb60caa45db4262833f0e60edd03e80ca4ae77953a2921ca966365c0"} Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.158725 4768 scope.go:117] "RemoveContainer" containerID="d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.219176 4768 scope.go:117] "RemoveContainer" containerID="ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.246682 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.261580 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-46f7q"] Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.551870 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" path="/var/lib/kubelet/pods/18e5b359-8d30-42b2-87ba-08526cae688f/volumes" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.553542 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" path="/var/lib/kubelet/pods/546377ca-f06b-4df1-ad4a-3f066d1a7ee5/volumes" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.661256 4768 scope.go:117] "RemoveContainer" containerID="f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.729241 4768 scope.go:117] "RemoveContainer" containerID="d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d" Dec 03 17:28:03 crc 
kubenswrapper[4768]: E1203 17:28:03.730068 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d\": container with ID starting with d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d not found: ID does not exist" containerID="d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.730113 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d"} err="failed to get container status \"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d\": rpc error: code = NotFound desc = could not find container \"d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d\": container with ID starting with d48ed2ad189749af4be8ec4f7b94f54ba7555d7e90a1aeb8f9acec6d05a4930d not found: ID does not exist" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.730144 4768 scope.go:117] "RemoveContainer" containerID="ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb" Dec 03 17:28:03 crc kubenswrapper[4768]: E1203 17:28:03.731704 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb\": container with ID starting with ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb not found: ID does not exist" containerID="ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.731730 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb"} err="failed to get container status \"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb\": rpc error: code = NotFound desc = could not find container \"ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb\": container with ID starting with ef46045e794d3926fe37a197d90e23dc457c2533b0b32b7a2ae3826055a495bb not found: ID does not exist" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.731745 4768 scope.go:117] "RemoveContainer" containerID="f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d" Dec 03 17:28:03 crc kubenswrapper[4768]: E1203 17:28:03.733391 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d\": container with ID starting with f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d not found: ID does not exist" containerID="f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d" Dec 03 17:28:03 crc kubenswrapper[4768]: I1203 17:28:03.733418 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d"} err="failed to get container status \"f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d\": rpc error: code = NotFound desc = could not find container \"f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d\": container with ID starting with f6827f0630175cb0cff363a71b2e6b0e866a23f9cb354e756dee5339686e818d not found: ID does not exist" Dec 03 17:28:04 crc kubenswrapper[4768]: 
I1203 17:28:04.302167 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-cs7bf/must-gather-hxvg9"] Dec 03 17:28:04 crc kubenswrapper[4768]: I1203 17:28:04.302651 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="copy" containerID="cri-o://450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8" gracePeriod=2 Dec 03 17:28:04 crc kubenswrapper[4768]: I1203 17:28:04.314271 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-cs7bf/must-gather-hxvg9"] Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.188972 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs7bf_must-gather-hxvg9_bf7189f8-31ab-4b63-9511-3144128149c5/copy/0.log" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.191133 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.208755 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-cs7bf_must-gather-hxvg9_bf7189f8-31ab-4b63-9511-3144128149c5/copy/0.log" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.209333 4768 generic.go:334] "Generic (PLEG): container finished" podID="bf7189f8-31ab-4b63-9511-3144128149c5" containerID="450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8" exitCode=143 Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.209433 4768 scope.go:117] "RemoveContainer" containerID="450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.209485 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-cs7bf/must-gather-hxvg9" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.235545 4768 scope.go:117] "RemoveContainer" containerID="4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.273448 4768 scope.go:117] "RemoveContainer" containerID="450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8" Dec 03 17:28:05 crc kubenswrapper[4768]: E1203 17:28:05.273958 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8\": container with ID starting with 450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8 not found: ID does not exist" containerID="450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.274002 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8"} err="failed to get container status \"450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8\": rpc error: code = NotFound desc = could not find container \"450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8\": container with ID starting with 450a6b3ea3c2c0f793e74d6d668e82d6e26aea35e2ec669cd6b1b3f42d39b0a8 not found: ID does not exist" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.274036 4768 scope.go:117] "RemoveContainer" containerID="4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686" Dec 03 17:28:05 crc kubenswrapper[4768]: E1203 17:28:05.274262 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686\": container with ID starting with 4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686 not found: ID does not exist" containerID="4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.274298 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686"} err="failed to get container status \"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686\": rpc error: code = NotFound desc = could not find container \"4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686\": container with ID starting with 4dd19b25cebd0ace0e0ba9261d76cfc9c92f5d1608885a5e672a94baaaf91686 not found: ID does not exist" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.333674 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output\") pod \"bf7189f8-31ab-4b63-9511-3144128149c5\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.334153 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gr7mm\" (UniqueName: \"kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm\") pod \"bf7189f8-31ab-4b63-9511-3144128149c5\" (UID: \"bf7189f8-31ab-4b63-9511-3144128149c5\") " Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.342027 4768 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm" (OuterVolumeSpecName: "kube-api-access-gr7mm") pod "bf7189f8-31ab-4b63-9511-3144128149c5" (UID: "bf7189f8-31ab-4b63-9511-3144128149c5"). InnerVolumeSpecName "kube-api-access-gr7mm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.437456 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gr7mm\" (UniqueName: \"kubernetes.io/projected/bf7189f8-31ab-4b63-9511-3144128149c5-kube-api-access-gr7mm\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.519172 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "bf7189f8-31ab-4b63-9511-3144128149c5" (UID: "bf7189f8-31ab-4b63-9511-3144128149c5"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.544814 4768 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bf7189f8-31ab-4b63-9511-3144128149c5-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:05 crc kubenswrapper[4768]: I1203 17:28:05.547435 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" path="/var/lib/kubelet/pods/bf7189f8-31ab-4b63-9511-3144128149c5/volumes" Dec 03 17:28:07 crc kubenswrapper[4768]: I1203 17:28:07.600891 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:28:07 crc kubenswrapper[4768]: I1203 17:28:07.669651 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:28:08 crc kubenswrapper[4768]: I1203 17:28:08.678022 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:28:09 crc kubenswrapper[4768]: I1203 17:28:09.261938 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-l88jw" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="registry-server" containerID="cri-o://36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59" gracePeriod=2 Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.068637 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.217240 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndrm6\" (UniqueName: \"kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6\") pod \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.217342 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content\") pod \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.217437 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities\") pod \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\" (UID: \"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9\") " Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.218556 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities" (OuterVolumeSpecName: "utilities") pod "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" (UID: "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.252179 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6" (OuterVolumeSpecName: "kube-api-access-ndrm6") pod "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" (UID: "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9"). InnerVolumeSpecName "kube-api-access-ndrm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.296768 4768 generic.go:334] "Generic (PLEG): container finished" podID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerID="36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59" exitCode=0 Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.296828 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerDied","Data":"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59"} Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.296868 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-l88jw" event={"ID":"eb3ff288-99fe-4cf0-9408-ac8e36ef40b9","Type":"ContainerDied","Data":"d52ba1912ad65123bd8d68d8b092ba33263e66d5ab21c88830eef7a49b4eb0a8"} Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.296894 4768 scope.go:117] "RemoveContainer" containerID="36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.297098 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-l88jw" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.322772 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndrm6\" (UniqueName: \"kubernetes.io/projected/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-kube-api-access-ndrm6\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.322852 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.348930 4768 scope.go:117] "RemoveContainer" containerID="28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.412322 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" (UID: "eb3ff288-99fe-4cf0-9408-ac8e36ef40b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.426061 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.428974 4768 scope.go:117] "RemoveContainer" containerID="965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.477320 4768 scope.go:117] "RemoveContainer" containerID="36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59" Dec 03 17:28:10 crc kubenswrapper[4768]: E1203 17:28:10.478192 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59\": container with ID starting with 36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59 not found: ID does not exist" containerID="36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.478255 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59"} err="failed to get container status \"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59\": rpc error: code = NotFound desc = could not find container \"36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59\": container with ID starting with 36395fc0a54ba937cd819e58137d8ed09d968e79642e50e411c090009037ea59 not found: ID does not exist" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.478339 4768 scope.go:117] "RemoveContainer" containerID="28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a" Dec 03 17:28:10 crc kubenswrapper[4768]: E1203 17:28:10.478936 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a\": container with ID starting with 28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a not found: ID does not exist" 
containerID="28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.478979 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a"} err="failed to get container status \"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a\": rpc error: code = NotFound desc = could not find container \"28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a\": container with ID starting with 28718a0f0c61ace8dd8275b3db940500660b75faf3de0428704d77a1dd0c4a5a not found: ID does not exist" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.479004 4768 scope.go:117] "RemoveContainer" containerID="965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a" Dec 03 17:28:10 crc kubenswrapper[4768]: E1203 17:28:10.479358 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a\": container with ID starting with 965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a not found: ID does not exist" containerID="965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.479395 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a"} err="failed to get container status \"965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a\": rpc error: code = NotFound desc = could not find container \"965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a\": container with ID starting with 965ef5cc1ec4bd6f01e4118fa5d781ac57b1880bde2b392f8d0014db862dc98a not found: ID does not exist" Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.664097 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:28:10 crc kubenswrapper[4768]: I1203 17:28:10.678448 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-l88jw"] Dec 03 17:28:11 crc kubenswrapper[4768]: I1203 17:28:11.547355 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" path="/var/lib/kubelet/pods/eb3ff288-99fe-4cf0-9408-ac8e36ef40b9/volumes" Dec 03 17:28:49 crc kubenswrapper[4768]: I1203 17:28:49.221861 4768 scope.go:117] "RemoveContainer" containerID="655bba876011512d01575594714fed8c2689e2488d9ee9c295668078bc3059f6" Dec 03 17:29:26 crc kubenswrapper[4768]: I1203 17:29:26.028230 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:29:26 crc kubenswrapper[4768]: I1203 17:29:26.029006 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:29:56 crc kubenswrapper[4768]: I1203 17:29:56.028659 4768 patch_prober.go:28] interesting 
pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:29:56 crc kubenswrapper[4768]: I1203 17:29:56.029254 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.182670 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc"] Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183630 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183644 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183662 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183668 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183687 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183693 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183712 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183718 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183732 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183738 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183753 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183759 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183770 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183779 4768 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183798 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183806 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="extract-utilities" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183819 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="copy" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183828 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="copy" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183838 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183844 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="extract-content" Dec 03 17:30:00 crc kubenswrapper[4768]: E1203 17:30:00.183853 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="gather" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.183859 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="gather" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184053 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="18e5b359-8d30-42b2-87ba-08526cae688f" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184066 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="546377ca-f06b-4df1-ad4a-3f066d1a7ee5" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184077 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="copy" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184099 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb3ff288-99fe-4cf0-9408-ac8e36ef40b9" containerName="registry-server" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184119 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf7189f8-31ab-4b63-9511-3144128149c5" containerName="gather" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.184851 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.187326 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.187874 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.200489 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc"] Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.274837 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.274907 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd9c6\" (UniqueName: \"kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.274973 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.377153 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.377213 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd9c6\" (UniqueName: \"kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.377261 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.378426 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume\") pod 
\"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.382905 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.393084 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd9c6\" (UniqueName: \"kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6\") pod \"collect-profiles-29413050-2pxwc\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:00 crc kubenswrapper[4768]: I1203 17:30:00.507978 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:01 crc kubenswrapper[4768]: I1203 17:30:01.057134 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc"] Dec 03 17:30:01 crc kubenswrapper[4768]: I1203 17:30:01.492539 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" event={"ID":"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f","Type":"ContainerStarted","Data":"7d0613f5fab49c9d458fad7d73296d0fb3e983c4432c40ddb2b5d1390d408729"} Dec 03 17:30:01 crc kubenswrapper[4768]: I1203 17:30:01.492887 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" event={"ID":"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f","Type":"ContainerStarted","Data":"dfa2dcbbfc8a899874d21f27432af0a2f4d4b280708e3a80513b8df73589ef79"} Dec 03 17:30:02 crc kubenswrapper[4768]: I1203 17:30:02.505575 4768 generic.go:334] "Generic (PLEG): container finished" podID="ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" containerID="7d0613f5fab49c9d458fad7d73296d0fb3e983c4432c40ddb2b5d1390d408729" exitCode=0 Dec 03 17:30:02 crc kubenswrapper[4768]: I1203 17:30:02.505713 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" event={"ID":"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f","Type":"ContainerDied","Data":"7d0613f5fab49c9d458fad7d73296d0fb3e983c4432c40ddb2b5d1390d408729"} Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.064069 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.164653 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd9c6\" (UniqueName: \"kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6\") pod \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.164729 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume\") pod \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.164949 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume\") pod \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\" (UID: \"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f\") " Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.169210 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume" (OuterVolumeSpecName: "config-volume") pod "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" (UID: "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.174831 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6" (OuterVolumeSpecName: "kube-api-access-pd9c6") pod "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" (UID: "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f"). InnerVolumeSpecName "kube-api-access-pd9c6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.177863 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" (UID: "ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.267163 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd9c6\" (UniqueName: \"kubernetes.io/projected/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-kube-api-access-pd9c6\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.267204 4768 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.267216 4768 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.523518 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" event={"ID":"ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f","Type":"ContainerDied","Data":"dfa2dcbbfc8a899874d21f27432af0a2f4d4b280708e3a80513b8df73589ef79"} Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.523555 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfa2dcbbfc8a899874d21f27432af0a2f4d4b280708e3a80513b8df73589ef79" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.523606 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29413050-2pxwc" Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.642430 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"] Dec 03 17:30:04 crc kubenswrapper[4768]: I1203 17:30:04.661497 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29413005-5bsgj"] Dec 03 17:30:05 crc kubenswrapper[4768]: I1203 17:30:05.544963 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2862f4a4-cd54-45e3-aa8b-322c0b39830d" path="/var/lib/kubelet/pods/2862f4a4-cd54-45e3-aa8b-322c0b39830d/volumes" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.028453 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.029640 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.029721 4768 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.031285 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76"} 
pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.031367 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" gracePeriod=600 Dec 03 17:30:26 crc kubenswrapper[4768]: E1203 17:30:26.161107 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.728719 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" exitCode=0 Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.728763 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76"} Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.728815 4768 scope.go:117] "RemoveContainer" containerID="4e5e53ad3113846333c0d737b3c05658feeefd74555b0979cda0cae63d8ab5ae" Dec 03 17:30:26 crc kubenswrapper[4768]: I1203 17:30:26.729641 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:30:26 crc kubenswrapper[4768]: E1203 17:30:26.729913 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:30:39 crc kubenswrapper[4768]: I1203 17:30:39.531947 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:30:39 crc kubenswrapper[4768]: E1203 17:30:39.533169 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:30:49 crc kubenswrapper[4768]: I1203 17:30:49.374477 4768 scope.go:117] "RemoveContainer" containerID="d9c75baff1183188786cbaa07ab8ace2077d58a7611294b6abbe5dc49a6b0ff6" Dec 03 17:30:53 crc kubenswrapper[4768]: I1203 17:30:53.539571 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:30:53 crc 
kubenswrapper[4768]: E1203 17:30:53.540244 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.304147 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2g2q/must-gather-bnw4f"] Dec 03 17:31:02 crc kubenswrapper[4768]: E1203 17:31:02.305399 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" containerName="collect-profiles" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.305415 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" containerName="collect-profiles" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.311462 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad3aef4d-e6dc-4d71-be1b-ecd486cd9c1f" containerName="collect-profiles" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.313108 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.317354 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m2g2q"/"kube-root-ca.crt" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.317386 4768 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m2g2q"/"openshift-service-ca.crt" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.337041 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m2g2q/must-gather-bnw4f"] Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.483790 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fhsp\" (UniqueName: \"kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.483907 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.585388 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.585548 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fhsp\" (UniqueName: \"kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " 
pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.585960 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.612780 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fhsp\" (UniqueName: \"kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp\") pod \"must-gather-bnw4f\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:02 crc kubenswrapper[4768]: I1203 17:31:02.640779 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:31:03 crc kubenswrapper[4768]: I1203 17:31:03.186669 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m2g2q/must-gather-bnw4f"] Dec 03 17:31:04 crc kubenswrapper[4768]: I1203 17:31:04.092028 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" event={"ID":"2df97f25-8950-46d5-961a-6ccbb6cc1f50","Type":"ContainerStarted","Data":"00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba"} Dec 03 17:31:04 crc kubenswrapper[4768]: I1203 17:31:04.092297 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" event={"ID":"2df97f25-8950-46d5-961a-6ccbb6cc1f50","Type":"ContainerStarted","Data":"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4"} Dec 03 17:31:04 crc kubenswrapper[4768]: I1203 17:31:04.092310 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" event={"ID":"2df97f25-8950-46d5-961a-6ccbb6cc1f50","Type":"ContainerStarted","Data":"879a8f7ff033443e2bc98d701e6983bbfe3a253baba0ef8a716dfe1357ae9f7c"} Dec 03 17:31:04 crc kubenswrapper[4768]: I1203 17:31:04.121973 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" podStartSLOduration=2.121947641 podStartE2EDuration="2.121947641s" podCreationTimestamp="2025-12-03 17:31:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-03 17:31:04.111482802 +0000 UTC m=+4361.030819225" watchObservedRunningTime="2025-12-03 17:31:04.121947641 +0000 UTC m=+4361.041284074" Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.531969 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:31:07 crc kubenswrapper[4768]: E1203 17:31:07.532841 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.746984 4768 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-m2g2q/crc-debug-qpbpc"] Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.749329 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.751762 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m2g2q"/"default-dockercfg-gxwdb" Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.921227 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:07 crc kubenswrapper[4768]: I1203 17:31:07.921500 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdcwp\" (UniqueName: \"kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: I1203 17:31:08.023321 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: I1203 17:31:08.023532 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: I1203 17:31:08.023921 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdcwp\" (UniqueName: \"kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: I1203 17:31:08.049917 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdcwp\" (UniqueName: \"kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp\") pod \"crc-debug-qpbpc\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: I1203 17:31:08.072544 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:08 crc kubenswrapper[4768]: W1203 17:31:08.162972 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda34f5309_f8ca_470a_bcfa_e930ec036ff7.slice/crio-f27423e24279600031115f0e903c249bd5e7ec79bb979ecc62334f0ecada0c9d WatchSource:0}: Error finding container f27423e24279600031115f0e903c249bd5e7ec79bb979ecc62334f0ecada0c9d: Status 404 returned error can't find the container with id f27423e24279600031115f0e903c249bd5e7ec79bb979ecc62334f0ecada0c9d Dec 03 17:31:09 crc kubenswrapper[4768]: I1203 17:31:09.148490 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" event={"ID":"a34f5309-f8ca-470a-bcfa-e930ec036ff7","Type":"ContainerStarted","Data":"84df39fd422b808ae400f6c1ca17e169d52714fc7335a3f1a5a9ae1b846c98f1"} Dec 03 17:31:09 crc kubenswrapper[4768]: I1203 17:31:09.149483 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" event={"ID":"a34f5309-f8ca-470a-bcfa-e930ec036ff7","Type":"ContainerStarted","Data":"f27423e24279600031115f0e903c249bd5e7ec79bb979ecc62334f0ecada0c9d"} Dec 03 17:31:19 crc kubenswrapper[4768]: I1203 17:31:19.532894 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:31:19 crc kubenswrapper[4768]: E1203 17:31:19.533544 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:31:32 crc kubenswrapper[4768]: I1203 17:31:32.531626 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:31:32 crc kubenswrapper[4768]: E1203 17:31:32.532356 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:31:45 crc kubenswrapper[4768]: I1203 17:31:45.539670 4768 generic.go:334] "Generic (PLEG): container finished" podID="a34f5309-f8ca-470a-bcfa-e930ec036ff7" containerID="84df39fd422b808ae400f6c1ca17e169d52714fc7335a3f1a5a9ae1b846c98f1" exitCode=0 Dec 03 17:31:45 crc kubenswrapper[4768]: I1203 17:31:45.562020 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" event={"ID":"a34f5309-f8ca-470a-bcfa-e930ec036ff7","Type":"ContainerDied","Data":"84df39fd422b808ae400f6c1ca17e169d52714fc7335a3f1a5a9ae1b846c98f1"} Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.676777 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.715959 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-qpbpc"] Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.724715 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-qpbpc"] Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.828962 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdcwp\" (UniqueName: \"kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp\") pod \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.829052 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host\") pod \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\" (UID: \"a34f5309-f8ca-470a-bcfa-e930ec036ff7\") " Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.829194 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host" (OuterVolumeSpecName: "host") pod "a34f5309-f8ca-470a-bcfa-e930ec036ff7" (UID: "a34f5309-f8ca-470a-bcfa-e930ec036ff7"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.829583 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a34f5309-f8ca-470a-bcfa-e930ec036ff7-host\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.834778 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp" (OuterVolumeSpecName: "kube-api-access-qdcwp") pod "a34f5309-f8ca-470a-bcfa-e930ec036ff7" (UID: "a34f5309-f8ca-470a-bcfa-e930ec036ff7"). InnerVolumeSpecName "kube-api-access-qdcwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:31:46 crc kubenswrapper[4768]: I1203 17:31:46.931370 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdcwp\" (UniqueName: \"kubernetes.io/projected/a34f5309-f8ca-470a-bcfa-e930ec036ff7-kube-api-access-qdcwp\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.531968 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:31:47 crc kubenswrapper[4768]: E1203 17:31:47.532412 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.545134 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a34f5309-f8ca-470a-bcfa-e930ec036ff7" path="/var/lib/kubelet/pods/a34f5309-f8ca-470a-bcfa-e930ec036ff7/volumes" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.567112 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-qpbpc" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.567168 4768 scope.go:117] "RemoveContainer" containerID="84df39fd422b808ae400f6c1ca17e169d52714fc7335a3f1a5a9ae1b846c98f1" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.914760 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-gdkh9"] Dec 03 17:31:47 crc kubenswrapper[4768]: E1203 17:31:47.915295 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a34f5309-f8ca-470a-bcfa-e930ec036ff7" containerName="container-00" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.915312 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="a34f5309-f8ca-470a-bcfa-e930ec036ff7" containerName="container-00" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.915570 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="a34f5309-f8ca-470a-bcfa-e930ec036ff7" containerName="container-00" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.917730 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:47 crc kubenswrapper[4768]: I1203 17:31:47.926906 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m2g2q"/"default-dockercfg-gxwdb" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.052945 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.053032 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82phn\" (UniqueName: \"kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.154702 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.154782 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82phn\" (UniqueName: \"kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.154829 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.178190 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82phn\" (UniqueName: \"kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn\") pod \"crc-debug-gdkh9\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.247537 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:48 crc kubenswrapper[4768]: I1203 17:31:48.577931 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" event={"ID":"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1","Type":"ContainerStarted","Data":"daed4552f38eb2ed4dcc749c075b3aaebbe92ea756204c60830699f124dfbe81"} Dec 03 17:31:49 crc kubenswrapper[4768]: I1203 17:31:49.590751 4768 generic.go:334] "Generic (PLEG): container finished" podID="6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" containerID="099d52e5169cb3496233d78e51642029e2853b23a810e8152998e8493bbb4483" exitCode=0 Dec 03 17:31:49 crc kubenswrapper[4768]: I1203 17:31:49.590826 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" event={"ID":"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1","Type":"ContainerDied","Data":"099d52e5169cb3496233d78e51642029e2853b23a810e8152998e8493bbb4483"} Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.566193 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-gdkh9"] Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.583390 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-gdkh9"] Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.729857 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.922989 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host\") pod \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.923124 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host" (OuterVolumeSpecName: "host") pod "6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" (UID: "6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.923423 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82phn\" (UniqueName: \"kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn\") pod \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\" (UID: \"6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1\") " Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.924038 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-host\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:50 crc kubenswrapper[4768]: I1203 17:31:50.937972 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn" (OuterVolumeSpecName: "kube-api-access-82phn") pod "6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" (UID: "6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1"). InnerVolumeSpecName "kube-api-access-82phn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.026242 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82phn\" (UniqueName: \"kubernetes.io/projected/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1-kube-api-access-82phn\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.544405 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" path="/var/lib/kubelet/pods/6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1/volumes" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.612942 4768 scope.go:117] "RemoveContainer" containerID="099d52e5169cb3496233d78e51642029e2853b23a810e8152998e8493bbb4483" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.612992 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-gdkh9" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.953327 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-7ksph"] Dec 03 17:31:51 crc kubenswrapper[4768]: E1203 17:31:51.955705 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" containerName="container-00" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.955786 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" containerName="container-00" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.956334 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a1414d7-2fc7-4e99-9f0b-c0351e6dabb1" containerName="container-00" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.957557 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:51 crc kubenswrapper[4768]: I1203 17:31:51.961519 4768 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m2g2q"/"default-dockercfg-gxwdb" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.047119 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.047663 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgpx7\" (UniqueName: \"kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.150221 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.150375 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgpx7\" (UniqueName: \"kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.150811 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.178540 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgpx7\" (UniqueName: \"kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7\") pod \"crc-debug-7ksph\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.287042 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:52 crc kubenswrapper[4768]: I1203 17:31:52.652926 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" event={"ID":"7ab5d060-e953-44ff-ab7e-641f32687662","Type":"ContainerStarted","Data":"0a75a70c3d3d265a6763159491295ab54f8af0c2e14b833274419ad0787cf108"} Dec 03 17:31:53 crc kubenswrapper[4768]: I1203 17:31:53.674429 4768 generic.go:334] "Generic (PLEG): container finished" podID="7ab5d060-e953-44ff-ab7e-641f32687662" containerID="86105bf27daa8f3bf38e28bd73d4a2d8559961a3c27d99380e98e77ff2366adf" exitCode=0 Dec 03 17:31:53 crc kubenswrapper[4768]: I1203 17:31:53.674505 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" event={"ID":"7ab5d060-e953-44ff-ab7e-641f32687662","Type":"ContainerDied","Data":"86105bf27daa8f3bf38e28bd73d4a2d8559961a3c27d99380e98e77ff2366adf"} Dec 03 17:31:53 crc kubenswrapper[4768]: I1203 17:31:53.713207 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-7ksph"] Dec 03 17:31:53 crc kubenswrapper[4768]: I1203 17:31:53.722570 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2g2q/crc-debug-7ksph"] Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.814387 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.925808 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgpx7\" (UniqueName: \"kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7\") pod \"7ab5d060-e953-44ff-ab7e-641f32687662\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.926421 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host\") pod \"7ab5d060-e953-44ff-ab7e-641f32687662\" (UID: \"7ab5d060-e953-44ff-ab7e-641f32687662\") " Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.926507 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host" (OuterVolumeSpecName: "host") pod "7ab5d060-e953-44ff-ab7e-641f32687662" (UID: "7ab5d060-e953-44ff-ab7e-641f32687662"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.927221 4768 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7ab5d060-e953-44ff-ab7e-641f32687662-host\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:54 crc kubenswrapper[4768]: I1203 17:31:54.937427 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7" (OuterVolumeSpecName: "kube-api-access-pgpx7") pod "7ab5d060-e953-44ff-ab7e-641f32687662" (UID: "7ab5d060-e953-44ff-ab7e-641f32687662"). InnerVolumeSpecName "kube-api-access-pgpx7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:31:55 crc kubenswrapper[4768]: I1203 17:31:55.029160 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgpx7\" (UniqueName: \"kubernetes.io/projected/7ab5d060-e953-44ff-ab7e-641f32687662-kube-api-access-pgpx7\") on node \"crc\" DevicePath \"\"" Dec 03 17:31:55 crc kubenswrapper[4768]: I1203 17:31:55.544679 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ab5d060-e953-44ff-ab7e-641f32687662" path="/var/lib/kubelet/pods/7ab5d060-e953-44ff-ab7e-641f32687662/volumes" Dec 03 17:31:55 crc kubenswrapper[4768]: I1203 17:31:55.695662 4768 scope.go:117] "RemoveContainer" containerID="86105bf27daa8f3bf38e28bd73d4a2d8559961a3c27d99380e98e77ff2366adf" Dec 03 17:31:55 crc kubenswrapper[4768]: I1203 17:31:55.695789 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-m2g2q/crc-debug-7ksph" Dec 03 17:32:01 crc kubenswrapper[4768]: I1203 17:32:01.531795 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:32:01 crc kubenswrapper[4768]: E1203 17:32:01.532770 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:32:16 crc kubenswrapper[4768]: I1203 17:32:16.532092 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:32:16 crc kubenswrapper[4768]: E1203 17:32:16.533259 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:32:25 crc kubenswrapper[4768]: I1203 17:32:25.906666 4768 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7ab5d060-e953-44ff-ab7e-641f32687662"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7ab5d060-e953-44ff-ab7e-641f32687662] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7ab5d060_e953_44ff_ab7e_641f32687662.slice" Dec 03 17:32:28 crc kubenswrapper[4768]: I1203 17:32:28.531190 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:32:28 crc kubenswrapper[4768]: E1203 17:32:28.531553 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:32:43 crc kubenswrapper[4768]: I1203 17:32:43.540490 4768 scope.go:117] "RemoveContainer" 
containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:32:43 crc kubenswrapper[4768]: E1203 17:32:43.541291 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:32:49 crc kubenswrapper[4768]: I1203 17:32:49.545525 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/init-config-reloader/0.log" Dec 03 17:32:49 crc kubenswrapper[4768]: I1203 17:32:49.838014 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/config-reloader/0.log" Dec 03 17:32:49 crc kubenswrapper[4768]: I1203 17:32:49.858368 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/init-config-reloader/0.log" Dec 03 17:32:49 crc kubenswrapper[4768]: I1203 17:32:49.865101 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f9cb5864-2bfb-49c0-8124-b11beb8fdad1/alertmanager/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.201162 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-b4f5488d-vnlnp_b97311e7-d43d-44d1-b971-c8cb754c1773/barbican-api/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.320953 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-b4f5488d-vnlnp_b97311e7-d43d-44d1-b971-c8cb754c1773/barbican-api-log/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.375159 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-b57955886-dvqrj_6b2a173f-65dd-4b2f-b497-826614a4bc17/barbican-keystone-listener/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.639403 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-b57955886-dvqrj_6b2a173f-65dd-4b2f-b497-826614a4bc17/barbican-keystone-listener-log/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.678010 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-58db46799c-q4fgd_0d8d5741-c6d7-43c3-8f2e-da9817d0992b/barbican-worker-log/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.688216 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-58db46799c-q4fgd_0d8d5741-c6d7-43c3-8f2e-da9817d0992b/barbican-worker/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.891366 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-m9jz4_76882892-8177-4627-a611-f9e6e75d9829/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:50 crc kubenswrapper[4768]: I1203 17:32:50.988872 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/ceilometer-central-agent/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.251114 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/proxy-httpd/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.336621 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/ceilometer-notification-agent/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.500085 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_d5a1be35-5f32-4810-b19f-6c6f7e8aa7f4/sg-core/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.620087 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ed073244-61fa-4ca6-968e-e9cb0a419e4b/cinder-api-log/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.658917 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_ed073244-61fa-4ca6-968e-e9cb0a419e4b/cinder-api/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.861919 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0378dd82-69e6-42b8-b5dd-26751ef9a0db/probe/0.log" Dec 03 17:32:51 crc kubenswrapper[4768]: I1203 17:32:51.872017 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_0378dd82-69e6-42b8-b5dd-26751ef9a0db/cinder-scheduler/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.074360 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_e19aa1f3-5836-440b-bc7e-dfc10baf6511/cloudkitty-api/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.081718 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_e19aa1f3-5836-440b-bc7e-dfc10baf6511/cloudkitty-api-log/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.250757 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_08387864-260c-4260-bf37-e878d9207c7d/loki-compactor/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.352223 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-56cd74f89f-x5fb7_fb71d5cf-3561-4f62-a0c0-980ae81ab050/loki-distributor/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.497736 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-55lmd_83f9f0ed-f17f-4e94-bcc7-5108489ea003/gateway/0.log" Dec 03 17:32:52 crc kubenswrapper[4768]: I1203 17:32:52.607308 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-76cc998948-jhn5q_b9983072-bd22-4145-a740-6f479db8e8fd/gateway/0.log" Dec 03 17:32:53 crc kubenswrapper[4768]: I1203 17:32:53.010473 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_8cec597c-1827-4712-b016-5c7cfc55c585/loki-index-gateway/0.log" Dec 03 17:32:53 crc kubenswrapper[4768]: I1203 17:32:53.375850 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-779849886d-xc5w8_7fed8740-2999-4b8f-bd2a-2bdfea8f03a5/loki-query-frontend/0.log" Dec 03 17:32:53 crc kubenswrapper[4768]: I1203 17:32:53.402559 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_5c0ad451-c513-4f94-ac08-aaa2c7df9ae8/loki-ingester/0.log" Dec 03 17:32:53 crc kubenswrapper[4768]: I1203 17:32:53.714316 4768 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-bjlvf_8258f70b-4e7b-40d0-af22-a50690f99fa0/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:53 crc kubenswrapper[4768]: I1203 17:32:53.977581 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-7hk85_8660b1b7-7972-4b35-a50e-010de4788792/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.040430 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/init/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.110430 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_ef812a88-c111-4283-b7ba-f90f3e946eec/cloudkitty-proc/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.369059 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-548665d79b-qvtkc_5bbec9d6-615c-4007-b056-19ead8728139/loki-querier/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.376194 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-s5j4t_91a0e247-aab8-40b9-83e3-687d7f6a5927/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.388044 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/init/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.486616 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-85f64749dc-rckkv_bead7c34-6203-449b-b855-48ef80b18409/dnsmasq-dns/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.602938 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_1821ab39-b12d-4311-a67e-01840cf95a09/glance-httpd/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.732627 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eeb2f38b-2ae6-408e-815c-5bcd14d35623/glance-httpd/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.736253 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_1821ab39-b12d-4311-a67e-01840cf95a09/glance-log/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.858404 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_eeb2f38b-2ae6-408e-815c-5bcd14d35623/glance-log/0.log" Dec 03 17:32:54 crc kubenswrapper[4768]: I1203 17:32:54.980369 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-lmm92_f4bca08e-ad57-49ce-8fd2-29262a619a67/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:55 crc kubenswrapper[4768]: I1203 17:32:55.052051 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-v5lgz_a709e070-9d8d-43ab-8cca-46c4ac80bda3/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:55 crc kubenswrapper[4768]: I1203 17:32:55.321911 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_keystone-cron-29413021-2dppd_39c831b1-4d74-43e9-a798-a0ce0f8c9c15/keystone-cron/0.log" Dec 03 17:32:55 crc kubenswrapper[4768]: I1203 17:32:55.401352 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_ed50faad-e23a-4fda-b993-1af6764ac5fb/kube-state-metrics/0.log" Dec 03 17:32:55 crc kubenswrapper[4768]: I1203 17:32:55.470496 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7c7c5849fb-krxhd_5c7cbc9b-e7e7-453c-b045-02d4f0317fff/keystone-api/0.log" Dec 03 17:32:55 crc kubenswrapper[4768]: I1203 17:32:55.730868 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-vl4wp_2025631a-ad01-494e-a78d-095aaedfa302/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:56 crc kubenswrapper[4768]: I1203 17:32:56.341628 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-2gxqn_b4046334-7016-451c-b6d8-ad389cca206a/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:56 crc kubenswrapper[4768]: I1203 17:32:56.354364 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-644d64cc89-l6cqk_260f7230-73a0-4bec-b9c4-2805af398ab1/neutron-httpd/0.log" Dec 03 17:32:56 crc kubenswrapper[4768]: I1203 17:32:56.422411 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-644d64cc89-l6cqk_260f7230-73a0-4bec-b9c4-2805af398ab1/neutron-api/0.log" Dec 03 17:32:56 crc kubenswrapper[4768]: I1203 17:32:56.532085 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:32:56 crc kubenswrapper[4768]: E1203 17:32:56.532646 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:32:56 crc kubenswrapper[4768]: I1203 17:32:56.909740 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2b4e5e05-1afb-4f90-93de-6331cd92bfcf/nova-api-log/0.log" Dec 03 17:32:57 crc kubenswrapper[4768]: I1203 17:32:57.132100 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_8c8dd45b-dbea-4da0-b8d5-5d5ddeaa75e8/nova-cell0-conductor-conductor/0.log" Dec 03 17:32:57 crc kubenswrapper[4768]: I1203 17:32:57.457396 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_2b4e5e05-1afb-4f90-93de-6331cd92bfcf/nova-api-api/0.log" Dec 03 17:32:57 crc kubenswrapper[4768]: I1203 17:32:57.665108 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_891b6c40-c436-4d8a-a035-c49252143ce1/nova-cell1-conductor-conductor/0.log" Dec 03 17:32:57 crc kubenswrapper[4768]: I1203 17:32:57.886528 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a8801603-67b7-4a04-b05e-de0651787247/nova-cell1-novncproxy-novncproxy/0.log" Dec 03 17:32:57 crc kubenswrapper[4768]: I1203 17:32:57.912491 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-4vmcd_d11533b9-aa83-4403-8c50-0172908b6cc3/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:32:58 crc kubenswrapper[4768]: I1203 17:32:58.046544 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4f7cf11c-aca1-42f7-a9f4-e9c7c941269b/nova-metadata-log/0.log" Dec 03 17:32:58 crc kubenswrapper[4768]: I1203 17:32:58.353021 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_31a17195-ba31-4233-b087-f31d38ff03a7/nova-scheduler-scheduler/0.log" Dec 03 17:32:58 crc kubenswrapper[4768]: I1203 17:32:58.722223 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/mysql-bootstrap/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.519173 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/galera/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.530126 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_3f6d3b77-fbdf-4dfd-b473-3e8288367442/mysql-bootstrap/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.670350 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_4f7cf11c-aca1-42f7-a9f4-e9c7c941269b/nova-metadata-metadata/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.748165 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/mysql-bootstrap/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.988268 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_8bddf09b-660e-4615-a1c6-72d46c7c2216/openstackclient/0.log" Dec 03 17:32:59 crc kubenswrapper[4768]: I1203 17:32:59.995911 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/mysql-bootstrap/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.001544 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_c3dc1084-f02c-45ff-87de-22a8818905b4/galera/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.225684 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-6bhgk_87225d49-4f3c-44e3-a05d-feee87a94114/ovn-controller/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.233229 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-l8llw_ce67993a-adfa-412b-9c3c-37c6bb25f007/openstack-network-exporter/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.511041 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server-init/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.784685 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server-init/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.824325 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovs-vswitchd/0.log" Dec 03 17:33:00 crc kubenswrapper[4768]: I1203 17:33:00.925417 4768 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_ovn-controller-ovs-hjhg9_8e6b671c-fd9d-438c-9c0c-9db70f4a63ca/ovsdb-server/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.079060 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-t8vn5_0a655975-f7c9-49f9-9f76-05d58ae66f9b/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.148989 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0157f48e-0d1b-492c-8dc5-c859820905d8/openstack-network-exporter/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.201669 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0157f48e-0d1b-492c-8dc5-c859820905d8/ovn-northd/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.388117 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a0d45532-8a91-4fa5-a7b5-21fdcf44160e/ovsdbserver-nb/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.419816 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_a0d45532-8a91-4fa5-a7b5-21fdcf44160e/openstack-network-exporter/0.log" Dec 03 17:33:01 crc kubenswrapper[4768]: I1203 17:33:01.901792 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cc4d3013-515d-4eb0-a20e-735bcdbed9db/openstack-network-exporter/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.001583 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cc4d3013-515d-4eb0-a20e-735bcdbed9db/ovsdbserver-sb/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.127415 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-85896597d4-l886p_9973c84b-640a-44cb-b0e0-e8a2d47ba909/placement-api/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.223669 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-85896597d4-l886p_9973c84b-640a-44cb-b0e0-e8a2d47ba909/placement-log/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.391313 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/init-config-reloader/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.598897 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/prometheus/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.611647 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/thanos-sidecar/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.624646 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/config-reloader/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.633532 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_98a2cac0-c468-421c-8acd-b7f7e3b471ea/init-config-reloader/0.log" Dec 03 17:33:02 crc kubenswrapper[4768]: I1203 17:33:02.834186 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/setup-container/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.019068 4768 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/setup-container/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.111024 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/setup-container/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.124898 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e1ebf32c-184a-46da-8f0e-e955fb1fa5e8/rabbitmq/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.382994 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/rabbitmq/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.467839 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7f83f074-b1a6-4d38-8a36-a6335766064f/setup-container/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.503438 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-ppxjq_591536f4-56e5-458a-b0f5-9a4d2effd8ff/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.737924 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-c9hxg_2bb4292b-90d3-4ff1-8bcc-c14129e0d6c6/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.852491 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ldjm9_ab27f0c2-92c5-4271-89a0-3faef991d57e/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:03 crc kubenswrapper[4768]: I1203 17:33:03.975427 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8rs7z_ebeafd5f-91e7-46a1-a3dd-b96267cfbf8f/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.121119 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-f7vnp_e7056232-6bbb-46d2-b15b-79dca6a43cb4/ssh-known-hosts-edpm-deployment/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.429566 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c4c66bd9-w8lwh_53c419ad-7c96-450d-be91-ae1598cfd390/proxy-server/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.588119 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c4c66bd9-w8lwh_53c419ad-7c96-450d-be91-ae1598cfd390/proxy-httpd/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.670911 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-rxvbr_43a9322c-e5fe-40d8-849f-dc84a5763f9c/swift-ring-rebalance/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.812628 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-reaper/0.log" Dec 03 17:33:04 crc kubenswrapper[4768]: I1203 17:33:04.818013 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-auditor/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.157565 4768 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-replicator/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.190311 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/account-server/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.320722 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-auditor/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.344664 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-replicator/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.447026 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-server/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.493101 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/container-updater/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.601892 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-expirer/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.661762 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-auditor/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.715932 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-replicator/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.762754 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-server/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.901883 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/object-updater/0.log" Dec 03 17:33:05 crc kubenswrapper[4768]: I1203 17:33:05.928391 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/rsync/0.log" Dec 03 17:33:06 crc kubenswrapper[4768]: I1203 17:33:06.046663 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_147901f2-6caa-4983-8e45-7e938cd9f36b/swift-recon-cron/0.log" Dec 03 17:33:06 crc kubenswrapper[4768]: I1203 17:33:06.194240 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-2mrjp_4f7d210c-5ea0-4b66-88f7-d8830a52109c/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:06 crc kubenswrapper[4768]: I1203 17:33:06.468688 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_430c4af3-d01b-4096-b87c-4adce312cb1b/tempest-tests-tempest-tests-runner/0.log" Dec 03 17:33:06 crc kubenswrapper[4768]: I1203 17:33:06.505032 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_729081b2-0022-49f3-9ebc-8640c6de0a0a/test-operator-logs-container/0.log" Dec 03 17:33:06 crc kubenswrapper[4768]: I1203 17:33:06.667620 4768 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-m9vxr_581e01fb-3c2a-4c39-926d-c25aebdfae5e/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 03 17:33:07 crc kubenswrapper[4768]: I1203 17:33:07.532954 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:33:07 crc kubenswrapper[4768]: E1203 17:33:07.533318 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:33:10 crc kubenswrapper[4768]: I1203 17:33:10.028952 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_c42c7f90-2ae1-4222-864c-b8f7f1733beb/memcached/0.log" Dec 03 17:33:18 crc kubenswrapper[4768]: I1203 17:33:18.531523 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:33:18 crc kubenswrapper[4768]: E1203 17:33:18.532868 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:33:33 crc kubenswrapper[4768]: I1203 17:33:33.539177 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:33:33 crc kubenswrapper[4768]: E1203 17:33:33.542616 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:33:38 crc kubenswrapper[4768]: I1203 17:33:38.823525 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-g5nnn_8cb78567-ca7b-4a8b-9f94-b503727cf509/kube-rbac-proxy/0.log" Dec 03 17:33:38 crc kubenswrapper[4768]: I1203 17:33:38.926055 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-g5nnn_8cb78567-ca7b-4a8b-9f94-b503727cf509/manager/0.log" Dec 03 17:33:39 crc kubenswrapper[4768]: I1203 17:33:39.085054 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-jgz9x_0eb6c4a6-a68d-4d28-9b09-64a3dd981978/kube-rbac-proxy/0.log" Dec 03 17:33:39 crc kubenswrapper[4768]: I1203 17:33:39.112982 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-jgz9x_0eb6c4a6-a68d-4d28-9b09-64a3dd981978/manager/0.log" Dec 03 17:33:39 crc kubenswrapper[4768]: I1203 17:33:39.954282 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hg9tx_ff2d8ce7-0093-406f-982e-dac8b2b62593/kube-rbac-proxy/0.log" Dec 03 17:33:39 crc kubenswrapper[4768]: I1203 17:33:39.955648 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:33:39 crc kubenswrapper[4768]: I1203 17:33:39.987613 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hg9tx_ff2d8ce7-0093-406f-982e-dac8b2b62593/manager/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.214391 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.220064 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.263043 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.412258 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/util/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.430009 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/pull/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.440640 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_fa0d5ece5b46e258d324e4876bc1c38d59827d2e3aa277f6ff66c644f4clz4b_6c41b0af-ccc3-49e5-a009-0ab5ea153ebb/extract/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.632527 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5mfx6_ac20b433-8d19-4ffc-a3d8-001ab7660cfb/manager/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.664385 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-5mfx6_ac20b433-8d19-4ffc-a3d8-001ab7660cfb/kube-rbac-proxy/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.740993 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m97mz_085d4818-0975-441d-87fc-8c22aa78d86f/kube-rbac-proxy/0.log" Dec 03 17:33:40 crc kubenswrapper[4768]: I1203 17:33:40.820778 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-m97mz_085d4818-0975-441d-87fc-8c22aa78d86f/manager/0.log" Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.072635 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-m8lmv_f91ea1ca-d4a3-47c9-a5a8-38a78224668a/kube-rbac-proxy/0.log" Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 
17:33:41.134050 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-m8lmv_f91ea1ca-d4a3-47c9-a5a8-38a78224668a/manager/0.log"
Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.660093 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9q857_ad48f666-a22a-4d97-9736-5f284268bd4a/kube-rbac-proxy/0.log"
Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.701240 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-vj7sm_3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1/kube-rbac-proxy/0.log"
Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.737969 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-vj7sm_3aa0ddf8-3f50-4927-b8c7-ce32cb4676c1/manager/0.log"
Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.913455 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9q857_ad48f666-a22a-4d97-9736-5f284268bd4a/manager/0.log"
Dec 03 17:33:41 crc kubenswrapper[4768]: I1203 17:33:41.965261 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-7pfgm_dc9eedd8-2956-447b-9a21-7b71bcb0c8c4/kube-rbac-proxy/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.046479 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-7pfgm_dc9eedd8-2956-447b-9a21-7b71bcb0c8c4/manager/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.132108 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-zv6pv_a9dcaa43-ad02-45aa-a320-dd9d2c609bf4/kube-rbac-proxy/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.231844 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-zv6pv_a9dcaa43-ad02-45aa-a320-dd9d2c609bf4/manager/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.378221 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9n7sr_34791f4b-32bc-44e5-90ca-ec286f96fe15/kube-rbac-proxy/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.436822 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-9n7sr_34791f4b-32bc-44e5-90ca-ec286f96fe15/manager/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.529530 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa/kube-rbac-proxy/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.666457 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-s6lpx_7c452eb1-dcc1-4f3f-a562-9a60e28cd9aa/manager/0.log"
Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.763671 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bfpmf_29a3455b-b1d4-496e-936b-348846b289e0/kube-rbac-proxy/0.log"
Dec 03 17:33:42 crc
kubenswrapper[4768]: I1203 17:33:42.872681 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-bfpmf_29a3455b-b1d4-496e-936b-348846b289e0/manager/0.log" Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.908521 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cnc7h_bec968a9-b8ec-48f3-9625-96ce1f1e2dda/manager/0.log" Dec 03 17:33:42 crc kubenswrapper[4768]: I1203 17:33:42.936898 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cnc7h_bec968a9-b8ec-48f3-9625-96ce1f1e2dda/kube-rbac-proxy/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.024509 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp_bae1e6ba-54bf-411a-a2b9-b79b8ff85210/kube-rbac-proxy/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.075546 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4v2hlp_bae1e6ba-54bf-411a-a2b9-b79b8ff85210/manager/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.382499 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-cqdlr_f8eb7f48-77fa-4d4b-a59b-94ed7a1e1a2f/registry-server/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.518436 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-65d54995fc-qt7jc_a616a1fc-015c-4f96-ab87-cb3fe397e123/operator/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.649704 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jwxs2_f6440acf-55b8-48fb-b212-550dcc9e9600/kube-rbac-proxy/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.699620 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jwxs2_f6440acf-55b8-48fb-b212-550dcc9e9600/manager/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.731025 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-qt8cv_982d5154-f537-4205-b268-3ce9aa7bdc37/kube-rbac-proxy/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.960312 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-qt8cv_982d5154-f537-4205-b268-3ce9aa7bdc37/manager/0.log" Dec 03 17:33:43 crc kubenswrapper[4768]: I1203 17:33:43.986862 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-vkjxc_f168e437-903a-4624-a0bc-95ea6b0e1789/operator/0.log" Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.128800 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-n2qpd_347b8067-6147-477e-b00b-a5a60a29b7d8/kube-rbac-proxy/0.log" Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.222829 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-n2qpd_347b8067-6147-477e-b00b-a5a60a29b7d8/manager/0.log" Dec 03 
17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.261694 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5b6647b8f8-ztfkl_f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5/kube-rbac-proxy/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.357591 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-df58498df-fdv5l_5a2dd1fe-2811-43db-959d-aceff599106d/manager/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.492820 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d2m42_e3af024c-b6f0-45c8-b5ab-6873b661878e/kube-rbac-proxy/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.517442 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d2m42_e3af024c-b6f0-45c8-b5ab-6873b661878e/manager/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.731677 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lg92p_051e5034-1155-4000-9d5b-96ee80ba6968/manager/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.779950 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5b6647b8f8-ztfkl_f1ad0bfb-c581-4a0c-92ab-2c702d87f3e5/manager/0.log"
Dec 03 17:33:44 crc kubenswrapper[4768]: I1203 17:33:44.991224 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-lg92p_051e5034-1155-4000-9d5b-96ee80ba6968/kube-rbac-proxy/0.log"
Dec 03 17:33:46 crc kubenswrapper[4768]: I1203 17:33:46.531739 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76"
Dec 03 17:33:46 crc kubenswrapper[4768]: E1203 17:33:46.532183 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:33:57 crc kubenswrapper[4768]: I1203 17:33:57.531912 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76"
Dec 03 17:33:57 crc kubenswrapper[4768]: E1203 17:33:57.532920 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9"
Dec 03 17:34:05 crc kubenswrapper[4768]: I1203 17:34:05.135418 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-sctcd_eb3106ce-6e9d-4bb7-bf0c-57ac60bbc925/control-plane-machine-set-operator/0.log"
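[editor's note] The RemoveContainer / "Error syncing pod, skipping ... CrashLoopBackOff: back-off 5m0s ..." pairs that recur through this section are the kubelet's sync loop declining to restart machine-config-daemon while its crash-loop backoff window is still open; the restart delay doubles after each failed start up to a cap, and "back-off 5m0s" indicates the cap has been reached. The container is eventually restarted at 17:35:32 (ContainerStarted event further down), consistent with a five-minute wait. A sketch of that doubling-with-cap schedule, assuming the kubelet's conventional 10s initial delay and 5m cap (assumed defaults, not taken from this log):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Crash-loop restart backoff: the delay doubles per failed start
        // and is capped; the cap is the "5m0s" in the log message above.
        delay, maxDelay := 10*time.Second, 5*time.Minute
        for attempt := 1; delay < maxDelay; attempt++ {
            fmt.Printf("restart %d: wait %v\n", attempt, delay)
            delay *= 2
        }
        fmt.Println("all later restarts: wait", maxDelay, "(capped)")
    }
[end editor's note]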
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nqmfz_cadb4efb-b28b-43fc-883f-6cf96d18af72/kube-rbac-proxy/0.log" Dec 03 17:34:05 crc kubenswrapper[4768]: I1203 17:34:05.324257 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-nqmfz_cadb4efb-b28b-43fc-883f-6cf96d18af72/machine-api-operator/0.log" Dec 03 17:34:12 crc kubenswrapper[4768]: I1203 17:34:12.532407 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:34:12 crc kubenswrapper[4768]: E1203 17:34:12.534038 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:34:18 crc kubenswrapper[4768]: I1203 17:34:18.335548 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-btgph_27f311f3-af6d-45b4-8e9f-b4437d56350c/cert-manager-controller/0.log" Dec 03 17:34:18 crc kubenswrapper[4768]: I1203 17:34:18.490630 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-k2zl7_540cd9b8-af71-4d61-bdab-50850c4eec6d/cert-manager-cainjector/0.log" Dec 03 17:34:18 crc kubenswrapper[4768]: I1203 17:34:18.581190 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-4l62p_ae9d8259-9e85-404b-8a1e-909147ffb4a7/cert-manager-webhook/0.log" Dec 03 17:34:27 crc kubenswrapper[4768]: I1203 17:34:27.531320 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:34:27 crc kubenswrapper[4768]: E1203 17:34:27.532288 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:34:32 crc kubenswrapper[4768]: I1203 17:34:32.250006 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-79jcg_fee7ecf6-cf74-41de-b6f7-16e83ab2cd84/nmstate-console-plugin/0.log" Dec 03 17:34:33 crc kubenswrapper[4768]: I1203 17:34:33.046132 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-v6x5p_6aff85f3-8f5e-44d9-be27-1bc63b1d8a38/nmstate-handler/0.log" Dec 03 17:34:33 crc kubenswrapper[4768]: I1203 17:34:33.069192 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ggfpn_3f27fdbd-f4fd-462c-9931-deb08bc97037/nmstate-metrics/0.log" Dec 03 17:34:33 crc kubenswrapper[4768]: I1203 17:34:33.078549 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ggfpn_3f27fdbd-f4fd-462c-9931-deb08bc97037/kube-rbac-proxy/0.log" Dec 03 17:34:33 crc kubenswrapper[4768]: I1203 17:34:33.312230 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-b4qp7_e4518900-d9d4-4ffd-a217-d8506b6d3027/nmstate-operator/0.log" Dec 03 17:34:33 crc kubenswrapper[4768]: I1203 17:34:33.387710 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-727lm_d339ee13-d547-4fa6-a7bb-17eabe43d15c/nmstate-webhook/0.log" Dec 03 17:34:40 crc kubenswrapper[4768]: I1203 17:34:40.531505 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:34:40 crc kubenswrapper[4768]: E1203 17:34:40.532530 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:34:48 crc kubenswrapper[4768]: I1203 17:34:48.353084 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/kube-rbac-proxy/0.log" Dec 03 17:34:48 crc kubenswrapper[4768]: I1203 17:34:48.400457 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/manager/0.log" Dec 03 17:34:55 crc kubenswrapper[4768]: I1203 17:34:55.532270 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:34:55 crc kubenswrapper[4768]: E1203 17:34:55.534652 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:35:01 crc kubenswrapper[4768]: I1203 17:35:01.546664 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-sbbq7_8bb39058-0f85-42fe-884e-f7ea6e389a1e/kube-rbac-proxy/0.log" Dec 03 17:35:01 crc kubenswrapper[4768]: I1203 17:35:01.660951 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-sbbq7_8bb39058-0f85-42fe-884e-f7ea6e389a1e/controller/0.log" Dec 03 17:35:01 crc kubenswrapper[4768]: I1203 17:35:01.790957 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.010860 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.026920 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.066235 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:35:02 
crc kubenswrapper[4768]: I1203 17:35:02.088895 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.241714 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.267705 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.294847 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.307687 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.465565 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-frr-files/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.499954 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-metrics/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.508443 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/cp-reloader/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.540425 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/controller/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.748175 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/frr-metrics/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.769551 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/kube-rbac-proxy-frr/0.log" Dec 03 17:35:02 crc kubenswrapper[4768]: I1203 17:35:02.784250 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/kube-rbac-proxy/0.log" Dec 03 17:35:03 crc kubenswrapper[4768]: I1203 17:35:03.034091 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/reloader/0.log" Dec 03 17:35:03 crc kubenswrapper[4768]: I1203 17:35:03.051829 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-nlk8z_3eb4dbc3-d0f3-42bd-8d09-0af1ae304716/frr-k8s-webhook-server/0.log" Dec 03 17:35:03 crc kubenswrapper[4768]: I1203 17:35:03.276141 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-57f5c9498-vdmjc_7b3a5fc2-f2ec-4a7a-815f-9b8c91db65c3/manager/0.log" Dec 03 17:35:03 crc kubenswrapper[4768]: I1203 17:35:03.488252 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-98db5c7f-g87m7_6fb7770c-b85a-4bd3-9f49-dedffaeae0e3/webhook-server/0.log" Dec 03 17:35:03 crc kubenswrapper[4768]: I1203 17:35:03.614225 
4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-79sgk_6c148908-2f51-41ee-adb8-bfd5cb821ebf/kube-rbac-proxy/0.log" Dec 03 17:35:04 crc kubenswrapper[4768]: I1203 17:35:04.145448 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-79sgk_6c148908-2f51-41ee-adb8-bfd5cb821ebf/speaker/0.log" Dec 03 17:35:04 crc kubenswrapper[4768]: I1203 17:35:04.310655 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-k7czv_e9282b00-b418-4626-9620-8ca6252433b2/frr/0.log" Dec 03 17:35:09 crc kubenswrapper[4768]: I1203 17:35:09.532534 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:35:09 crc kubenswrapper[4768]: E1203 17:35:09.533905 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:35:18 crc kubenswrapper[4768]: I1203 17:35:18.983742 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.176362 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.208099 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.245450 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.508756 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/util/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.509168 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/extract/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.509885 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_03c6e0f8bd928fdcaaf530d547155f7eef49635d3e29724a094c0ab694wdpgb_45f2ba97-fe29-4c2e-949f-3e7d0243d7e3/pull/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.700664 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.877729 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.879060 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:35:19 crc kubenswrapper[4768]: I1203 17:35:19.953200 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:35:20 crc kubenswrapper[4768]: I1203 17:35:20.307247 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/util/0.log" Dec 03 17:35:20 crc kubenswrapper[4768]: I1203 17:35:20.312782 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/pull/0.log" Dec 03 17:35:20 crc kubenswrapper[4768]: I1203 17:35:20.346340 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_142e5edc705b0443a978f15b9d74db4e11d2db1d26a61e7f8c9e49e303zvj2p_4a915754-d391-4329-91eb-40f99fcebdad/extract/0.log" Dec 03 17:35:20 crc kubenswrapper[4768]: I1203 17:35:20.955802 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.098372 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.128657 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.137719 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.303712 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.325494 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/pull/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.331021 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212ft5dc7_3e3bf1c3-070b-49af-98bd-be91dbd82bae/extract/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.485041 4768 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.533619 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:35:21 crc kubenswrapper[4768]: E1203 17:35:21.533988 4768 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g94rv_openshift-machine-config-operator(fea3ff9c-dadb-4168-90c0-24bc05a888e9)\"" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.789644 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.790258 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.790939 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.944015 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/util/0.log" Dec 03 17:35:21 crc kubenswrapper[4768]: I1203 17:35:21.983041 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/extract/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.010065 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210xt5lb_c013a14e-13e8-4979-95b9-948abf069cdd/pull/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.203733 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.405025 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.407630 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.408696 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.568138 4768 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/util/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.639087 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/pull/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.650010 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dc2v4_8101fa8a-2d2c-4622-973c-a805443c3269/extract/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.771232 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.946629 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:35:22 crc kubenswrapper[4768]: I1203 17:35:22.958737 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.009901 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.185016 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-content/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.186229 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/extract-utilities/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.228809 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.561983 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.611836 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.684392 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.847581 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-z8vgf_f9578af8-3d78-4487-a6e0-57d79ebe218e/registry-server/0.log" Dec 03 17:35:23 crc kubenswrapper[4768]: I1203 17:35:23.869861 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-content/0.log" Dec 03 17:35:24 crc 
kubenswrapper[4768]: I1203 17:35:24.137287 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/extract-utilities/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.140897 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-vl5v6_5897d8d4-04ee-4f56-81c1-bdcd96028ec4/registry-server/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.332134 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-9jljw_60d2c487-bb7a-43ee-a699-906a81e5627d/marketplace-operator/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.384434 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.560309 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.565682 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.611379 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.754072 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-content/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.787227 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/extract-utilities/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.878728 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:35:24 crc kubenswrapper[4768]: I1203 17:35:24.944247 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-pwrl8_737e36ee-70f0-4076-af72-83d09a86268b/registry-server/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: I1203 17:35:25.019051 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: I1203 17:35:25.051341 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: I1203 17:35:25.107867 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: I1203 17:35:25.237127 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-utilities/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: 
I1203 17:35:25.284629 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/extract-content/0.log" Dec 03 17:35:25 crc kubenswrapper[4768]: I1203 17:35:25.981909 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-q4gkq_e1e80c42-cfda-453e-8634-a8e2ad23991f/registry-server/0.log" Dec 03 17:35:32 crc kubenswrapper[4768]: I1203 17:35:32.531771 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:35:33 crc kubenswrapper[4768]: I1203 17:35:33.014526 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"4fb23c0558bff3cc5d90f09a76ce856d0c0d0318cec70c18d259eca607f4921c"} Dec 03 17:35:40 crc kubenswrapper[4768]: I1203 17:35:40.716427 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-cgfsr_48a3a666-c857-4b4f-858c-43bc2f9d6f08/prometheus-operator/0.log" Dec 03 17:35:40 crc kubenswrapper[4768]: I1203 17:35:40.894554 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7bff46bcd6-9mzm8_286fed40-67a8-4eab-9ca8-3c7609503df1/prometheus-operator-admission-webhook/0.log" Dec 03 17:35:41 crc kubenswrapper[4768]: I1203 17:35:41.520708 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-7bff46bcd6-flrwf_cff0ca88-1474-46c7-b046-cec35a7d2409/prometheus-operator-admission-webhook/0.log" Dec 03 17:35:41 crc kubenswrapper[4768]: I1203 17:35:41.536722 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-7wglg_37340e28-1544-4f32-aed0-4c1d277cbf95/operator/0.log" Dec 03 17:35:41 crc kubenswrapper[4768]: I1203 17:35:41.706137 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-hrj6l_cebba08d-4a33-458a-9893-e717d6359f90/perses-operator/0.log" Dec 03 17:35:54 crc kubenswrapper[4768]: I1203 17:35:54.351325 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/kube-rbac-proxy/0.log" Dec 03 17:35:54 crc kubenswrapper[4768]: I1203 17:35:54.390505 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-6f7789658f-x4dsz_08a42ef1-90ae-4368-870e-e9bda0d806b0/manager/0.log" Dec 03 17:37:38 crc kubenswrapper[4768]: I1203 17:37:38.402721 4768 generic.go:334] "Generic (PLEG): container finished" podID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerID="b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4" exitCode=0 Dec 03 17:37:38 crc kubenswrapper[4768]: I1203 17:37:38.402814 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" event={"ID":"2df97f25-8950-46d5-961a-6ccbb6cc1f50","Type":"ContainerDied","Data":"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4"} Dec 03 17:37:38 crc kubenswrapper[4768]: I1203 17:37:38.404240 4768 scope.go:117] "RemoveContainer" containerID="b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4" Dec 03 17:37:39 crc 
kubenswrapper[4768]: I1203 17:37:39.365452 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2g2q_must-gather-bnw4f_2df97f25-8950-46d5-961a-6ccbb6cc1f50/gather/0.log" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.552992 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:37:50 crc kubenswrapper[4768]: E1203 17:37:50.561347 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ab5d060-e953-44ff-ab7e-641f32687662" containerName="container-00" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.561391 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ab5d060-e953-44ff-ab7e-641f32687662" containerName="container-00" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.561728 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ab5d060-e953-44ff-ab7e-641f32687662" containerName="container-00" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.563636 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.579155 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.615584 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjg82\" (UniqueName: \"kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.615937 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.615996 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.717295 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.717506 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjg82\" (UniqueName: \"kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.717561 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.718142 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.718150 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.751330 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjg82\" (UniqueName: \"kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82\") pod \"community-operators-77r2v\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:50 crc kubenswrapper[4768]: I1203 17:37:50.887705 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:37:51 crc kubenswrapper[4768]: I1203 17:37:51.574691 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m2g2q/must-gather-bnw4f"] Dec 03 17:37:51 crc kubenswrapper[4768]: I1203 17:37:51.575277 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="copy" containerID="cri-o://00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba" gracePeriod=2 Dec 03 17:37:51 crc kubenswrapper[4768]: I1203 17:37:51.603934 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m2g2q/must-gather-bnw4f"] Dec 03 17:37:51 crc kubenswrapper[4768]: I1203 17:37:51.620061 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.135102 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2g2q_must-gather-bnw4f_2df97f25-8950-46d5-961a-6ccbb6cc1f50/copy/0.log" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.138627 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.272178 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output\") pod \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.272227 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fhsp\" (UniqueName: \"kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp\") pod \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\" (UID: \"2df97f25-8950-46d5-961a-6ccbb6cc1f50\") " Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.279338 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp" (OuterVolumeSpecName: "kube-api-access-7fhsp") pod "2df97f25-8950-46d5-961a-6ccbb6cc1f50" (UID: "2df97f25-8950-46d5-961a-6ccbb6cc1f50"). InnerVolumeSpecName "kube-api-access-7fhsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.375206 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fhsp\" (UniqueName: \"kubernetes.io/projected/2df97f25-8950-46d5-961a-6ccbb6cc1f50-kube-api-access-7fhsp\") on node \"crc\" DevicePath \"\"" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.441678 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "2df97f25-8950-46d5-961a-6ccbb6cc1f50" (UID: "2df97f25-8950-46d5-961a-6ccbb6cc1f50"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.477292 4768 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2df97f25-8950-46d5-961a-6ccbb6cc1f50-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.553721 4768 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m2g2q_must-gather-bnw4f_2df97f25-8950-46d5-961a-6ccbb6cc1f50/copy/0.log" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.554285 4768 generic.go:334] "Generic (PLEG): container finished" podID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerID="00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba" exitCode=143 Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.554339 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m2g2q/must-gather-bnw4f" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.554391 4768 scope.go:117] "RemoveContainer" containerID="00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.557008 4768 generic.go:334] "Generic (PLEG): container finished" podID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerID="ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb" exitCode=0 Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.557055 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerDied","Data":"ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb"} Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.557085 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerStarted","Data":"995b4102d9b509821b3154a688e4f9c7a8753ff63bff4476f12954f9ee0097fb"} Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.559984 4768 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.583496 4768 scope.go:117] "RemoveContainer" containerID="b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.631521 4768 scope.go:117] "RemoveContainer" containerID="00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba" Dec 03 17:37:52 crc kubenswrapper[4768]: E1203 17:37:52.631920 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba\": container with ID starting with 00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba not found: ID does not exist" containerID="00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.631963 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba"} err="failed to get container status \"00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba\": rpc error: code = NotFound desc = could not find container \"00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba\": container with ID starting with 00b82f0cee8308b1b0d4b5f15270fdebc1f448515a0dcbb82fae46c0abdbccba not found: ID does not exist" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.631991 4768 scope.go:117] "RemoveContainer" containerID="b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4" Dec 03 17:37:52 crc kubenswrapper[4768]: E1203 17:37:52.632241 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4\": container with ID starting with b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4 not found: ID does not exist" containerID="b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4" Dec 03 17:37:52 crc kubenswrapper[4768]: I1203 17:37:52.632278 4768 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4"} err="failed to get container status \"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4\": rpc error: code = NotFound desc = could not find container \"b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4\": container with ID starting with b5caf02cda40e1f162d28326cde0a7201eb240bf1918b6e57461fbf8c3cd6cd4 not found: ID does not exist" Dec 03 17:37:53 crc kubenswrapper[4768]: I1203 17:37:53.622314 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" path="/var/lib/kubelet/pods/2df97f25-8950-46d5-961a-6ccbb6cc1f50/volumes" Dec 03 17:37:54 crc kubenswrapper[4768]: I1203 17:37:54.628728 4768 generic.go:334] "Generic (PLEG): container finished" podID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerID="dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6" exitCode=0 Dec 03 17:37:54 crc kubenswrapper[4768]: I1203 17:37:54.628776 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerDied","Data":"dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6"} Dec 03 17:37:55 crc kubenswrapper[4768]: I1203 17:37:55.642578 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerStarted","Data":"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238"} Dec 03 17:37:55 crc kubenswrapper[4768]: I1203 17:37:55.671238 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-77r2v" podStartSLOduration=3.0793424 podStartE2EDuration="5.671214236s" podCreationTimestamp="2025-12-03 17:37:50 +0000 UTC" firstStartedPulling="2025-12-03 17:37:52.559658349 +0000 UTC m=+4769.478994782" lastFinishedPulling="2025-12-03 17:37:55.151530195 +0000 UTC m=+4772.070866618" observedRunningTime="2025-12-03 17:37:55.661019551 +0000 UTC m=+4772.580355984" watchObservedRunningTime="2025-12-03 17:37:55.671214236 +0000 UTC m=+4772.590550659" Dec 03 17:37:56 crc kubenswrapper[4768]: I1203 17:37:56.028313 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:37:56 crc kubenswrapper[4768]: I1203 17:37:56.028368 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:38:00 crc kubenswrapper[4768]: I1203 17:38:00.888316 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:00 crc kubenswrapper[4768]: I1203 17:38:00.888934 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:00 crc kubenswrapper[4768]: I1203 17:38:00.947961 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:01 crc kubenswrapper[4768]: I1203 17:38:01.753363 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:01 crc kubenswrapper[4768]: I1203 17:38:01.810868 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:38:03 crc kubenswrapper[4768]: I1203 17:38:03.718722 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-77r2v" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="registry-server" containerID="cri-o://1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238" gracePeriod=2 Dec 03 17:38:03 crc kubenswrapper[4768]: E1203 17:38:03.876736 4768 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod140d5e00_3015_4a9f_ace2_43ec13bfae6f.slice/crio-1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238.scope\": RecentStats: unable to find data in memory cache]" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.296433 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.402896 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content\") pod \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.403107 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities\") pod \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.403297 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjg82\" (UniqueName: \"kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82\") pod \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\" (UID: \"140d5e00-3015-4a9f-ace2-43ec13bfae6f\") " Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.404078 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities" (OuterVolumeSpecName: "utilities") pod "140d5e00-3015-4a9f-ace2-43ec13bfae6f" (UID: "140d5e00-3015-4a9f-ace2-43ec13bfae6f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.410752 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82" (OuterVolumeSpecName: "kube-api-access-gjg82") pod "140d5e00-3015-4a9f-ace2-43ec13bfae6f" (UID: "140d5e00-3015-4a9f-ace2-43ec13bfae6f"). InnerVolumeSpecName "kube-api-access-gjg82". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.463977 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "140d5e00-3015-4a9f-ace2-43ec13bfae6f" (UID: "140d5e00-3015-4a9f-ace2-43ec13bfae6f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.506129 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.506872 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/140d5e00-3015-4a9f-ace2-43ec13bfae6f-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.506962 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjg82\" (UniqueName: \"kubernetes.io/projected/140d5e00-3015-4a9f-ace2-43ec13bfae6f-kube-api-access-gjg82\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.730943 4768 generic.go:334] "Generic (PLEG): container finished" podID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerID="1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238" exitCode=0 Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.730995 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerDied","Data":"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238"} Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.731038 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77r2v" event={"ID":"140d5e00-3015-4a9f-ace2-43ec13bfae6f","Type":"ContainerDied","Data":"995b4102d9b509821b3154a688e4f9c7a8753ff63bff4476f12954f9ee0097fb"} Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.731057 4768 scope.go:117] "RemoveContainer" containerID="1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.731104 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-77r2v" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.773711 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.774313 4768 scope.go:117] "RemoveContainer" containerID="dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.787656 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-77r2v"] Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.797871 4768 scope.go:117] "RemoveContainer" containerID="ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.856509 4768 scope.go:117] "RemoveContainer" containerID="1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238" Dec 03 17:38:04 crc kubenswrapper[4768]: E1203 17:38:04.856909 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238\": container with ID starting with 1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238 not found: ID does not exist" containerID="1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.856940 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238"} err="failed to get container status \"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238\": rpc error: code = NotFound desc = could not find container \"1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238\": container with ID starting with 1d1cd5e64a4a9720e4928750d5eccf531e276d6f09b978b6d06f96d407ab1238 not found: ID does not exist" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.856962 4768 scope.go:117] "RemoveContainer" containerID="dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6" Dec 03 17:38:04 crc kubenswrapper[4768]: E1203 17:38:04.857497 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6\": container with ID starting with dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6 not found: ID does not exist" containerID="dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.857534 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6"} err="failed to get container status \"dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6\": rpc error: code = NotFound desc = could not find container \"dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6\": container with ID starting with dff0ea1a5d7ab864a7b8b77a20e0c879b15720fda83acf35a950c9ab61238ac6 not found: ID does not exist" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.857555 4768 scope.go:117] "RemoveContainer" containerID="ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb" Dec 03 17:38:04 crc kubenswrapper[4768]: E1203 17:38:04.857881 4768 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb\": container with ID starting with ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb not found: ID does not exist" containerID="ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb" Dec 03 17:38:04 crc kubenswrapper[4768]: I1203 17:38:04.857900 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb"} err="failed to get container status \"ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb\": rpc error: code = NotFound desc = could not find container \"ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb\": container with ID starting with ca477a09fb3bce78e3dcef1e40f9dabcb25abc4a7d73848cf678a75fc3f4e1eb not found: ID does not exist" Dec 03 17:38:05 crc kubenswrapper[4768]: I1203 17:38:05.544342 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" path="/var/lib/kubelet/pods/140d5e00-3015-4a9f-ace2-43ec13bfae6f/volumes" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.864759 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:13 crc kubenswrapper[4768]: E1203 17:38:13.865488 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="copy" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865501 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="copy" Dec 03 17:38:13 crc kubenswrapper[4768]: E1203 17:38:13.865519 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="gather" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865525 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="gather" Dec 03 17:38:13 crc kubenswrapper[4768]: E1203 17:38:13.865537 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="extract-utilities" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865545 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="extract-utilities" Dec 03 17:38:13 crc kubenswrapper[4768]: E1203 17:38:13.865566 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="registry-server" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865572 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="registry-server" Dec 03 17:38:13 crc kubenswrapper[4768]: E1203 17:38:13.865589 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="extract-content" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865617 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="extract-content" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865808 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="copy" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 
17:38:13.865825 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="2df97f25-8950-46d5-961a-6ccbb6cc1f50" containerName="gather" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.865841 4768 memory_manager.go:354] "RemoveStaleState removing state" podUID="140d5e00-3015-4a9f-ace2-43ec13bfae6f" containerName="registry-server" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.867800 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.885102 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.910505 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqz9k\" (UniqueName: \"kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.910635 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:13 crc kubenswrapper[4768]: I1203 17:38:13.910666 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.012194 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqz9k\" (UniqueName: \"kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.012348 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.012386 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.013179 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 
17:38:14.013269 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.037343 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqz9k\" (UniqueName: \"kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k\") pod \"redhat-operators-gx8f6\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.216309 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.734431 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:14 crc kubenswrapper[4768]: I1203 17:38:14.835793 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerStarted","Data":"0ba41d99c1d0b0d69f0fc0d90a9bfa6cb5fafb11e18e1471416eabf26043ac4b"} Dec 03 17:38:15 crc kubenswrapper[4768]: I1203 17:38:15.848523 4768 generic.go:334] "Generic (PLEG): container finished" podID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerID="5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c" exitCode=0 Dec 03 17:38:15 crc kubenswrapper[4768]: I1203 17:38:15.848580 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerDied","Data":"5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c"} Dec 03 17:38:17 crc kubenswrapper[4768]: I1203 17:38:17.870926 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerStarted","Data":"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931"} Dec 03 17:38:19 crc kubenswrapper[4768]: I1203 17:38:19.892312 4768 generic.go:334] "Generic (PLEG): container finished" podID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerID="740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931" exitCode=0 Dec 03 17:38:19 crc kubenswrapper[4768]: I1203 17:38:19.892508 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerDied","Data":"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931"} Dec 03 17:38:20 crc kubenswrapper[4768]: I1203 17:38:20.906779 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerStarted","Data":"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8"} Dec 03 17:38:20 crc kubenswrapper[4768]: I1203 17:38:20.941166 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gx8f6" podStartSLOduration=3.486880201 podStartE2EDuration="7.941141445s" podCreationTimestamp="2025-12-03 17:38:13 +0000 UTC" firstStartedPulling="2025-12-03 17:38:15.851512885 +0000 
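
The redhat-operators-gx8f6 sequence above is the usual marketplace catalog lifecycle: the extract-utilities and extract-content containers each run to exitCode=0 in order, then registry-server starts and stays up. A sketch of the pod shape that produces this ordering, using client-go types; the image names are placeholders, only the container names come from the log:

package sketch

import (
	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// catalogPod sketches the shape implied by the events above: two init
// containers run sequentially to completion, then the long-running
// registry-server container starts.
func catalogPod() *corev1.Pod {
	return &corev1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "redhat-operators-gx8f6",
			Namespace: "openshift-marketplace",
		},
		Spec: corev1.PodSpec{
			InitContainers: []corev1.Container{
				{Name: "extract-utilities", Image: "example.invalid/utilities:latest"},
				{Name: "extract-content", Image: "example.invalid/catalog:latest"},
			},
			Containers: []corev1.Container{
				{Name: "registry-server", Image: "example.invalid/catalog:latest"},
			},
		},
	}
}
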
UTC m=+4792.770849308" lastFinishedPulling="2025-12-03 17:38:20.305774119 +0000 UTC m=+4797.225110552" observedRunningTime="2025-12-03 17:38:20.931623958 +0000 UTC m=+4797.850960391" watchObservedRunningTime="2025-12-03 17:38:20.941141445 +0000 UTC m=+4797.860477868" Dec 03 17:38:24 crc kubenswrapper[4768]: I1203 17:38:24.218850 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:24 crc kubenswrapper[4768]: I1203 17:38:24.219208 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:25 crc kubenswrapper[4768]: I1203 17:38:25.376437 4768 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gx8f6" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="registry-server" probeResult="failure" output=< Dec 03 17:38:25 crc kubenswrapper[4768]: timeout: failed to connect service ":50051" within 1s Dec 03 17:38:25 crc kubenswrapper[4768]: > Dec 03 17:38:26 crc kubenswrapper[4768]: I1203 17:38:26.028733 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:38:26 crc kubenswrapper[4768]: I1203 17:38:26.029138 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:38:34 crc kubenswrapper[4768]: I1203 17:38:34.264849 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:34 crc kubenswrapper[4768]: I1203 17:38:34.327048 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:34 crc kubenswrapper[4768]: I1203 17:38:34.502820 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.059538 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gx8f6" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="registry-server" containerID="cri-o://ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8" gracePeriod=2 Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.659998 4768 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.729968 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content\") pod \"6b58b54b-3599-440c-9ba0-e531eaf2b500\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.730103 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities\") pod \"6b58b54b-3599-440c-9ba0-e531eaf2b500\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.730354 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqz9k\" (UniqueName: \"kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k\") pod \"6b58b54b-3599-440c-9ba0-e531eaf2b500\" (UID: \"6b58b54b-3599-440c-9ba0-e531eaf2b500\") " Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.732910 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities" (OuterVolumeSpecName: "utilities") pod "6b58b54b-3599-440c-9ba0-e531eaf2b500" (UID: "6b58b54b-3599-440c-9ba0-e531eaf2b500"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.736460 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k" (OuterVolumeSpecName: "kube-api-access-cqz9k") pod "6b58b54b-3599-440c-9ba0-e531eaf2b500" (UID: "6b58b54b-3599-440c-9ba0-e531eaf2b500"). InnerVolumeSpecName "kube-api-access-cqz9k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.832884 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqz9k\" (UniqueName: \"kubernetes.io/projected/6b58b54b-3599-440c-9ba0-e531eaf2b500-kube-api-access-cqz9k\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.832931 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.845063 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b58b54b-3599-440c-9ba0-e531eaf2b500" (UID: "6b58b54b-3599-440c-9ba0-e531eaf2b500"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:38:36 crc kubenswrapper[4768]: I1203 17:38:36.935087 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b58b54b-3599-440c-9ba0-e531eaf2b500-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.070378 4768 generic.go:334] "Generic (PLEG): container finished" podID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerID="ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8" exitCode=0 Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.070411 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerDied","Data":"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8"} Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.070435 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gx8f6" event={"ID":"6b58b54b-3599-440c-9ba0-e531eaf2b500","Type":"ContainerDied","Data":"0ba41d99c1d0b0d69f0fc0d90a9bfa6cb5fafb11e18e1471416eabf26043ac4b"} Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.070452 4768 scope.go:117] "RemoveContainer" containerID="ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.070504 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gx8f6" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.093692 4768 scope.go:117] "RemoveContainer" containerID="740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.112562 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.126100 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gx8f6"] Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.133869 4768 scope.go:117] "RemoveContainer" containerID="5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.182968 4768 scope.go:117] "RemoveContainer" containerID="ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8" Dec 03 17:38:37 crc kubenswrapper[4768]: E1203 17:38:37.183678 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8\": container with ID starting with ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8 not found: ID does not exist" containerID="ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.183859 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8"} err="failed to get container status \"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8\": rpc error: code = NotFound desc = could not find container \"ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8\": container with ID starting with ebac2ebf642d19ce8a3cd861c0ae82b82cfcd68ac5b56e284938fb278d2a28b8 not found: ID does not exist" Dec 03 17:38:37 crc 
kubenswrapper[4768]: I1203 17:38:37.183990 4768 scope.go:117] "RemoveContainer" containerID="740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931" Dec 03 17:38:37 crc kubenswrapper[4768]: E1203 17:38:37.184396 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931\": container with ID starting with 740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931 not found: ID does not exist" containerID="740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.184507 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931"} err="failed to get container status \"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931\": rpc error: code = NotFound desc = could not find container \"740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931\": container with ID starting with 740662a79f3b356e8d01a1be33b027d6fa4aa968e53612d1809460707742a931 not found: ID does not exist" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.184662 4768 scope.go:117] "RemoveContainer" containerID="5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c" Dec 03 17:38:37 crc kubenswrapper[4768]: E1203 17:38:37.185139 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c\": container with ID starting with 5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c not found: ID does not exist" containerID="5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.185246 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c"} err="failed to get container status \"5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c\": rpc error: code = NotFound desc = could not find container \"5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c\": container with ID starting with 5af412959a51f6edca15511349901af1307fa6e3ee72a599a629e2943a79386c not found: ID does not exist" Dec 03 17:38:37 crc kubenswrapper[4768]: I1203 17:38:37.544481 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" path="/var/lib/kubelet/pods/6b58b54b-3599-440c-9ba0-e531eaf2b500/volumes" Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.028868 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.029449 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.029494 4768 kubelet.go:2542] "SyncLoop (probe)" 
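
The machine-config-daemon entries show the same liveness failure recurring roughly every 30s until the kubelet marks the container unhealthy. A sketch of the probe being exercised, an HTTP GET against 127.0.0.1:8798/health, using client-go types; the period and threshold values are illustrative, they are not visible in the log, and this assumes a client-go version where Probe embeds ProbeHandler:

package sketch

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// livenessProbe sketches the check the entries above keep failing.
func livenessProbe() *corev1.Probe {
	return &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    30, // the failures above arrive ~30s apart
		FailureThreshold: 3,  // illustrative: the threshold is not visible in the log
	}
}
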
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.030280 4768 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4fb23c0558bff3cc5d90f09a76ce856d0c0d0318cec70c18d259eca607f4921c"} pod="openshift-machine-config-operator/machine-config-daemon-g94rv" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.030333 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" containerID="cri-o://4fb23c0558bff3cc5d90f09a76ce856d0c0d0318cec70c18d259eca607f4921c" gracePeriod=600 Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.269907 4768 generic.go:334] "Generic (PLEG): container finished" podID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerID="4fb23c0558bff3cc5d90f09a76ce856d0c0d0318cec70c18d259eca607f4921c" exitCode=0 Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.270193 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerDied","Data":"4fb23c0558bff3cc5d90f09a76ce856d0c0d0318cec70c18d259eca607f4921c"} Dec 03 17:38:56 crc kubenswrapper[4768]: I1203 17:38:56.270226 4768 scope.go:117] "RemoveContainer" containerID="3ed442cf33505096744ba4fd777ef2ee767c6070eefa9f84c2ba6ad2c64d0a76" Dec 03 17:38:57 crc kubenswrapper[4768]: I1203 17:38:57.282754 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" event={"ID":"fea3ff9c-dadb-4168-90c0-24bc05a888e9","Type":"ContainerStarted","Data":"8bbb8564dd6b1f4a1b849d5d4e7b95f0de93e30eff8cb42f2336e8ce824d110f"} Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.210847 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"] Dec 03 17:40:19 crc kubenswrapper[4768]: E1203 17:40:19.214992 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="extract-content" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.215032 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="extract-content" Dec 03 17:40:19 crc kubenswrapper[4768]: E1203 17:40:19.215072 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="extract-utilities" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.215082 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="extract-utilities" Dec 03 17:40:19 crc kubenswrapper[4768]: E1203 17:40:19.215106 4768 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="registry-server" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.215114 4768 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="registry-server" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.216669 4768 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6b58b54b-3599-440c-9ba0-e531eaf2b500" containerName="registry-server" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.228669 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.230358 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"] Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.389805 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df67n\" (UniqueName: \"kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.389896 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.389938 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.492316 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df67n\" (UniqueName: \"kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.492402 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.492445 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.493032 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.493074 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities\") pod \"redhat-marketplace-4ss9w\" (UID: 
\"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.516715 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df67n\" (UniqueName: \"kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n\") pod \"redhat-marketplace-4ss9w\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") " pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:19 crc kubenswrapper[4768]: I1203 17:40:19.559137 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ss9w" Dec 03 17:40:20 crc kubenswrapper[4768]: I1203 17:40:20.048843 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"] Dec 03 17:40:20 crc kubenswrapper[4768]: I1203 17:40:20.124245 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerStarted","Data":"dd385a41e8ee0891ca93a67c22f4bdba6a973a648c29c08a89cedf8be4c869b1"} Dec 03 17:40:20 crc kubenswrapper[4768]: I1203 17:40:20.982944 4768 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"] Dec 03 17:40:20 crc kubenswrapper[4768]: I1203 17:40:20.986109 4768 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:20 crc kubenswrapper[4768]: I1203 17:40:20.992840 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"] Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.126702 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.126789 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.126895 4768 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtrj5\" (UniqueName: \"kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.144238 4768 generic.go:334] "Generic (PLEG): container finished" podID="8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" containerID="ad408a5f9e51c2d5bb7aa041efad532f8f3beb21a76083e7bb90cfb9d8c2c202" exitCode=0 Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.144294 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerDied","Data":"ad408a5f9e51c2d5bb7aa041efad532f8f3beb21a76083e7bb90cfb9d8c2c202"} Dec 03 
17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.228680 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtrj5\" (UniqueName: \"kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.228864 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.228911 4768 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.229758 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.230275 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.260556 4768 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtrj5\" (UniqueName: \"kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5\") pod \"certified-operators-tgwrs\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.308799 4768 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:21 crc kubenswrapper[4768]: I1203 17:40:21.844017 4768 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"] Dec 03 17:40:21 crc kubenswrapper[4768]: W1203 17:40:21.847917 4768 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd18d3e98_8221_453a_a899_cd7cc0aa94f9.slice/crio-7b58f78542dc0ca82af54c9f862d35728f7275c2439e8c1ab74b751a931cdd96 WatchSource:0}: Error finding container 7b58f78542dc0ca82af54c9f862d35728f7275c2439e8c1ab74b751a931cdd96: Status 404 returned error can't find the container with id 7b58f78542dc0ca82af54c9f862d35728f7275c2439e8c1ab74b751a931cdd96 Dec 03 17:40:22 crc kubenswrapper[4768]: I1203 17:40:22.154045 4768 generic.go:334] "Generic (PLEG): container finished" podID="d18d3e98-8221-453a-a899-cd7cc0aa94f9" containerID="95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f" exitCode=0 Dec 03 17:40:22 crc kubenswrapper[4768]: I1203 17:40:22.154121 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerDied","Data":"95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f"} Dec 03 17:40:22 crc kubenswrapper[4768]: I1203 17:40:22.154148 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerStarted","Data":"7b58f78542dc0ca82af54c9f862d35728f7275c2439e8c1ab74b751a931cdd96"} Dec 03 17:40:22 crc kubenswrapper[4768]: I1203 17:40:22.156517 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerStarted","Data":"5d25dfbdf8fe47c89238724a55f12b02431d41d4080783c3dec12dd09104dea0"} Dec 03 17:40:23 crc kubenswrapper[4768]: I1203 17:40:23.171310 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerStarted","Data":"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f"} Dec 03 17:40:23 crc kubenswrapper[4768]: I1203 17:40:23.174092 4768 generic.go:334] "Generic (PLEG): container finished" podID="8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" containerID="5d25dfbdf8fe47c89238724a55f12b02431d41d4080783c3dec12dd09104dea0" exitCode=0 Dec 03 17:40:23 crc kubenswrapper[4768]: I1203 17:40:23.174143 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerDied","Data":"5d25dfbdf8fe47c89238724a55f12b02431d41d4080783c3dec12dd09104dea0"} Dec 03 17:40:24 crc kubenswrapper[4768]: I1203 17:40:24.188221 4768 generic.go:334] "Generic (PLEG): container finished" podID="d18d3e98-8221-453a-a899-cd7cc0aa94f9" containerID="3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f" exitCode=0 Dec 03 17:40:24 crc kubenswrapper[4768]: I1203 17:40:24.188296 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerDied","Data":"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f"} Dec 03 17:40:24 crc kubenswrapper[4768]: I1203 17:40:24.193976 
Dec 03 17:40:24 crc kubenswrapper[4768]: I1203 17:40:24.193976 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerStarted","Data":"1e8bd9843b41d6acb642660aa5aad0ec6e091669ba1187846383ef7a253b0247"}
Dec 03 17:40:24 crc kubenswrapper[4768]: I1203 17:40:24.232163 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4ss9w" podStartSLOduration=2.7418213529999997 podStartE2EDuration="5.232143093s" podCreationTimestamp="2025-12-03 17:40:19 +0000 UTC" firstStartedPulling="2025-12-03 17:40:21.146625727 +0000 UTC m=+4918.065962160" lastFinishedPulling="2025-12-03 17:40:23.636947477 +0000 UTC m=+4920.556283900" observedRunningTime="2025-12-03 17:40:24.230576841 +0000 UTC m=+4921.149913274" watchObservedRunningTime="2025-12-03 17:40:24.232143093 +0000 UTC m=+4921.151479536"
Dec 03 17:40:25 crc kubenswrapper[4768]: I1203 17:40:25.211404 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerStarted","Data":"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c"}
Dec 03 17:40:25 crc kubenswrapper[4768]: I1203 17:40:25.233380 4768 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tgwrs" podStartSLOduration=2.795733948 podStartE2EDuration="5.233360807s" podCreationTimestamp="2025-12-03 17:40:20 +0000 UTC" firstStartedPulling="2025-12-03 17:40:22.15709872 +0000 UTC m=+4919.076435153" lastFinishedPulling="2025-12-03 17:40:24.594725589 +0000 UTC m=+4921.514062012" observedRunningTime="2025-12-03 17:40:25.23088315 +0000 UTC m=+4922.150219583" watchObservedRunningTime="2025-12-03 17:40:25.233360807 +0000 UTC m=+4922.152697230"
Dec 03 17:40:29 crc kubenswrapper[4768]: I1203 17:40:29.560002 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:29 crc kubenswrapper[4768]: I1203 17:40:29.560618 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:29 crc kubenswrapper[4768]: I1203 17:40:29.616704 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:30 crc kubenswrapper[4768]: I1203 17:40:30.380085 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:30 crc kubenswrapper[4768]: I1203 17:40:30.478920 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"]
Dec 03 17:40:31 crc kubenswrapper[4768]: I1203 17:40:31.309365 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tgwrs"
Dec 03 17:40:31 crc kubenswrapper[4768]: I1203 17:40:31.310221 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tgwrs"
Dec 03 17:40:31 crc kubenswrapper[4768]: I1203 17:40:31.357994 4768 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tgwrs"
Dec 03 17:40:32 crc kubenswrapper[4768]: I1203 17:40:32.289008 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4ss9w" podUID="8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" containerName="registry-server" containerID="cri-o://1e8bd9843b41d6acb642660aa5aad0ec6e091669ba1187846383ef7a253b0247" gracePeriod=2
Dec 03 17:40:32 crc kubenswrapper[4768]: I1203 17:40:32.346811 4768 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tgwrs"
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.252877 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"]
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.301478 4768 generic.go:334] "Generic (PLEG): container finished" podID="8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" containerID="1e8bd9843b41d6acb642660aa5aad0ec6e091669ba1187846383ef7a253b0247" exitCode=0
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.301621 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerDied","Data":"1e8bd9843b41d6acb642660aa5aad0ec6e091669ba1187846383ef7a253b0247"}
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.301694 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4ss9w" event={"ID":"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57","Type":"ContainerDied","Data":"dd385a41e8ee0891ca93a67c22f4bdba6a973a648c29c08a89cedf8be4c869b1"}
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.301722 4768 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd385a41e8ee0891ca93a67c22f4bdba6a973a648c29c08a89cedf8be4c869b1"
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.659427 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.807041 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities\") pod \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") "
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.807118 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content\") pod \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") "
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.807335 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-df67n\" (UniqueName: \"kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n\") pod \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\" (UID: \"8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57\") "
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.810695 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities" (OuterVolumeSpecName: "utilities") pod "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" (UID: "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.817908 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n" (OuterVolumeSpecName: "kube-api-access-df67n") pod "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" (UID: "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57"). InnerVolumeSpecName "kube-api-access-df67n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.829050 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" (UID: "8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.910443 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-df67n\" (UniqueName: \"kubernetes.io/projected/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-kube-api-access-df67n\") on node \"crc\" DevicePath \"\""
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.910497 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-utilities\") on node \"crc\" DevicePath \"\""
Dec 03 17:40:33 crc kubenswrapper[4768]: I1203 17:40:33.910512 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.311351 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4ss9w"
Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.311556 4768 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-tgwrs" podUID="d18d3e98-8221-453a-a899-cd7cc0aa94f9" containerName="registry-server" containerID="cri-o://0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c" gracePeriod=2
Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.357234 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"]
Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.367196 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4ss9w"]
Need to start a new one" pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.929247 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities\") pod \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.929442 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtrj5\" (UniqueName: \"kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5\") pod \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.929644 4768 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content\") pod \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\" (UID: \"d18d3e98-8221-453a-a899-cd7cc0aa94f9\") " Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.930781 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities" (OuterVolumeSpecName: "utilities") pod "d18d3e98-8221-453a-a899-cd7cc0aa94f9" (UID: "d18d3e98-8221-453a-a899-cd7cc0aa94f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.936144 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5" (OuterVolumeSpecName: "kube-api-access-mtrj5") pod "d18d3e98-8221-453a-a899-cd7cc0aa94f9" (UID: "d18d3e98-8221-453a-a899-cd7cc0aa94f9"). InnerVolumeSpecName "kube-api-access-mtrj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 03 17:40:34 crc kubenswrapper[4768]: I1203 17:40:34.990773 4768 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d18d3e98-8221-453a-a899-cd7cc0aa94f9" (UID: "d18d3e98-8221-453a-a899-cd7cc0aa94f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.032666 4768 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.032720 4768 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d18d3e98-8221-453a-a899-cd7cc0aa94f9-utilities\") on node \"crc\" DevicePath \"\"" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.032734 4768 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtrj5\" (UniqueName: \"kubernetes.io/projected/d18d3e98-8221-453a-a899-cd7cc0aa94f9-kube-api-access-mtrj5\") on node \"crc\" DevicePath \"\"" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.324377 4768 generic.go:334] "Generic (PLEG): container finished" podID="d18d3e98-8221-453a-a899-cd7cc0aa94f9" containerID="0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c" exitCode=0 Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.324429 4768 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tgwrs" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.324431 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerDied","Data":"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c"} Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.324899 4768 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tgwrs" event={"ID":"d18d3e98-8221-453a-a899-cd7cc0aa94f9","Type":"ContainerDied","Data":"7b58f78542dc0ca82af54c9f862d35728f7275c2439e8c1ab74b751a931cdd96"} Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.324925 4768 scope.go:117] "RemoveContainer" containerID="0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.567137 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57" path="/var/lib/kubelet/pods/8d6fdfdb-cd0d-4c64-9afc-0afb07fc6b57/volumes" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.918071 4768 scope.go:117] "RemoveContainer" containerID="3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f" Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.934270 4768 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"] Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.945481 4768 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-tgwrs"] Dec 03 17:40:35 crc kubenswrapper[4768]: I1203 17:40:35.961425 4768 scope.go:117] "RemoveContainer" containerID="95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.001796 4768 scope.go:117] "RemoveContainer" containerID="0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c" Dec 03 17:40:36 crc kubenswrapper[4768]: E1203 17:40:36.002378 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c\": container with ID 
starting with 0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c not found: ID does not exist" containerID="0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.002548 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c"} err="failed to get container status \"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c\": rpc error: code = NotFound desc = could not find container \"0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c\": container with ID starting with 0993ae7c37acd18b53285be478bb916b30ca0f279e3c95c2d7803c4d9fc75b9c not found: ID does not exist" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.002703 4768 scope.go:117] "RemoveContainer" containerID="3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f" Dec 03 17:40:36 crc kubenswrapper[4768]: E1203 17:40:36.003239 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f\": container with ID starting with 3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f not found: ID does not exist" containerID="3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.003273 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f"} err="failed to get container status \"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f\": rpc error: code = NotFound desc = could not find container \"3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f\": container with ID starting with 3ea99c7e29a43ea247e3ea95ec2f5727aa33b60b278c9520f7bef91a49daa43f not found: ID does not exist" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.003315 4768 scope.go:117] "RemoveContainer" containerID="95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f" Dec 03 17:40:36 crc kubenswrapper[4768]: E1203 17:40:36.003687 4768 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f\": container with ID starting with 95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f not found: ID does not exist" containerID="95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f" Dec 03 17:40:36 crc kubenswrapper[4768]: I1203 17:40:36.003791 4768 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f"} err="failed to get container status \"95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f\": rpc error: code = NotFound desc = could not find container \"95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f\": container with ID starting with 95ed083dd1e6ec8c58254120a4dba1d407d4300ad0761020d403754b10115a7f not found: ID does not exist" Dec 03 17:40:37 crc kubenswrapper[4768]: I1203 17:40:37.546892 4768 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d18d3e98-8221-453a-a899-cd7cc0aa94f9" path="/var/lib/kubelet/pods/d18d3e98-8221-453a-a899-cd7cc0aa94f9/volumes" Dec 03 17:40:56 crc kubenswrapper[4768]: I1203 
Dec 03 17:40:56 crc kubenswrapper[4768]: I1203 17:40:56.029077 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 03 17:41:26 crc kubenswrapper[4768]: I1203 17:41:26.028266 4768 patch_prober.go:28] interesting pod/machine-config-daemon-g94rv container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 03 17:41:26 crc kubenswrapper[4768]: I1203 17:41:26.028994 4768 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g94rv" podUID="fea3ff9c-dadb-4168-90c0-24bc05a888e9" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"